Please wait. This can take some minutes ...
Many resources are needed to download a project. Please understand that we have to compensate our server costs. Thank you in advance.
Project price only 1 $
You can buy this project and download or modify it as often as you want.
com.ning.billing.meter.timeline.persistent.TimelineSqlDao.sql.stg Maven / Gradle / Ivy
group TimelineSqlDao;

/* Multi-tenant isolation predicates shared by the templates below.
   NOTE(review): AND_CHECK_TENANT previously emitted a dangling "AND " —
   the <CHECK_TENANT()> expansion was stripped during extraction; restored. */
CHECK_TENANT() ::= "tenant_record_id = :tenantRecordId"
AND_CHECK_TENANT() ::= "AND <CHECK_TENANT()>"
/* Resolve a source name to its record_id, scoped to the calling tenant.
   NOTE(review): tenant guard restored — it appears to have been stripped
   by extraction (the CHECK_TENANT macros are defined but unused); confirm
   against upstream. */
getSourceRecordId() ::= <<
select
record_id
from sources
where source = :source
and <CHECK_TENANT()>
;
>>
/* Reverse lookup: source name by record_id, scoped to the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getSourceName() ::= <<
select
source
from sources
where record_id = :recordId
and <CHECK_TENANT()>
;
>>
/* List all sources visible to the calling tenant.
   The original had a dangling "where" — the stripped expression can only
   be the tenant predicate; restored via the intact CHECK_TENANT macro. */
getSources() ::= <<
select
record_id
, source
from sources
where <CHECK_TENANT()>
;
>>
/* Insert a new source row. :userName is deliberately bound twice —
   creator and updater are the same principal at insert time. Tenant and
   account scoping are written explicitly as columns, so no tenant
   predicate is needed here. */
addSource() ::= <<
insert into sources (
source
, created_date
, created_by
, updated_date
, updated_by
, account_record_id
, tenant_record_id
) values (
:source
, :createdDate
, :userName
, :updatedDate
, :userName
, :accountRecordId
, :tenantRecordId
);
>>
/* Resolve a category name to its record_id, scoped to the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getCategoryRecordId() ::= <<
select
record_id
from categories
where category = :category
and <CHECK_TENANT()>
;
>>
/* Reverse lookup: category name by record_id, scoped to the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getCategory() ::= <<
select
category
from categories
where record_id = :recordId
and <CHECK_TENANT()>
;
>>
/* List all categories visible to the calling tenant.
   The original had a dangling "where" — the stripped expression can only
   be the tenant predicate; restored via the intact CHECK_TENANT macro. */
getCategories() ::= <<
select
record_id
, category
from categories
where <CHECK_TENANT()>
;
>>
/* Insert a new category row. :userName is bound twice (created_by and
   updated_by). Unlike sources, categories carry no account_record_id —
   only tenant scoping, written explicitly as a column. */
addCategory() ::= <<
insert into categories (
category
, created_date
, created_by
, updated_date
, updated_by
, tenant_record_id
) values (
:category
, :createdDate
, :userName
, :updatedDate
, :userName
, :tenantRecordId
);
>>
/* Resolve a (category, metric) pair to the metric's record_id, scoped to
   the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getMetricRecordId() ::= <<
select
record_id
from metrics
where metric = :metric
and category_record_id = :categoryRecordId
and <CHECK_TENANT()>
;
>>
/* Fetch a metric's owning category id and name by record_id, scoped to
   the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getCategoryRecordIdAndMetric() ::= <<
select
category_record_id
, metric
from metrics
where record_id = :recordId
and <CHECK_TENANT()>
;
>>
/* Reverse lookup: metric name by record_id, scoped to the calling tenant.
   NOTE(review): tenant guard restored (stripped by extraction); confirm
   against upstream. */
getMetric() ::= <<
select
metric
from metrics
where record_id = :recordId
and <CHECK_TENANT()>
;
>>
/* List all metrics visible to the calling tenant.
   The original had a dangling "where" — the stripped expression can only
   be the tenant predicate; restored via the intact CHECK_TENANT macro. */
getMetrics() ::= <<
select
record_id
, category_record_id
, metric
from metrics
where <CHECK_TENANT()>
;
>>
/* Insert a new metric row under an existing category. :userName is bound
   twice (created_by and updated_by); tenant scoping is written explicitly
   as a column. */
addMetric() ::= <<
insert into metrics (
category_record_id
, metric
, created_date
, created_by
, updated_date
, updated_by
, tenant_record_id
) values (
:categoryRecordId
, :metric
, :createdDate
, :userName
, :updatedDate
, :userName
, :tenantRecordId
);
>>
/* Auto-increment id generated by the most recent insert on this
   connection. last_insert_id() is MySQL-specific — this dialect is not
   portable to other engines. */
getLastInsertedRecordId() ::= <<
select last_insert_id();
>>
/* Insert one timeline chunk. Note that record_id is supplied explicitly
   (:chunkId) rather than auto-generated, so chunk ids are assigned by the
   caller. Samples land either inline (in_row_samples) or as a blob
   (blob_samples); tenant and account scoping are explicit columns. */
insertTimelineChunk() ::= <<
insert into timeline_chunks (
record_id
, source_record_id
, metric_record_id
, sample_count
, start_time
, end_time
, in_row_samples
, blob_samples
, aggregation_level
, not_valid
, dont_aggregate
, account_record_id
, tenant_record_id
) values (
:chunkId
, :sourceRecordId
, :metricRecordId
, :sampleCount
, :startTime
, :endTime
, :inRowSamples
, :blobSamples
, :aggregationLevel
, :notValid
, :dontAggregate
, :accountRecordId
, :tenantRecordId
);
>>
/* Batch variant: same statement as insertTimelineChunk, executed by the
   DAO layer over an iterator of bindings.
   NOTE(review): the original read ::= "" — the template expression was
   stripped by extraction; restored as a delegation to insertTimelineChunk.
   Confirm against upstream. */
bulkInsertTimelineChunks() ::= "<insertTimelineChunk()>"
/* Fetch valid chunks overlapping [startTime, endTime] for the given
   source/metric id lists. The declared template parameters were unused in
   the scraped text and both "in ()" lists were empty — the <sourceIds> /
   <metricIds> expansions were stripped by extraction; restored. The "\<="
   survives because "<" must be escaped inside StringTemplate bodies.
   NOTE(review): tenant guard also restored — confirm against upstream. */
getSamplesBySourceRecordIdsAndMetricRecordIds(sourceIds, metricIds) ::= <<
select
record_id
, metric_record_id
, source_record_id
, sample_count
, in_row_samples
, blob_samples
, start_time
, end_time
, aggregation_level
, not_valid
, dont_aggregate
from timeline_chunks
where end_time >= :startTime
and start_time \<= :endTime
and source_record_id in (<sourceIds>)
and metric_record_id in (<metricIds>)
and not_valid = 0
and <CHECK_TENANT()>
order by source_record_id, metric_record_id, start_time asc
;
>>
/* Persist a snapshot of per-source accumulator start times (start_times
   is an opaque serialized payload from the caller's perspective here).
   Tenant and account scoping are explicit columns. */
insertLastStartTimes() ::= <<
insert into last_start_times (
time_inserted
, start_times
, account_record_id
, tenant_record_id
) values (
:timeInserted
, :startTimes
, :accountRecordId
, :tenantRecordId
);
>>
/* Most recent start-times snapshot for the calling tenant.
   The original had a dangling "where" — the stripped expression can only
   be the tenant predicate; restored via the intact CHECK_TENANT macro. */
getLastStartTimes() ::= <<
select
time_inserted
, start_times
from last_start_times
where <CHECK_TENANT()>
order by time_inserted desc
limit 1
>>
/* Delete the calling tenant's start-times snapshots.
   The original read "delete ... where ;" — an unfiltered DELETE once the
   stripped tenant predicate is dropped; restored via CHECK_TENANT so only
   the tenant's rows are removed. */
deleteLastStartTimes() ::= <<
delete from last_start_times where <CHECK_TENANT()>;
>>
/* Health-check query used to validate connectivity.
   The original read "where limit 1" — the stripped expression between
   them can only be the tenant predicate; restored via CHECK_TENANT. */
test() ::= <<
select 1 from timeline_chunks where <CHECK_TENANT()> limit 1;
>>