- COMMIT SCN is not supported on tables with one or more LOB columns
- The materialized view logs of all master tables of the MVIEW must be created WITH COMMIT SCN
- We also add the columns referenced in the SELECT or JOIN of the MVIEW
CREATE MATERIALIZED VIEW LOG ON sales
WITH SEQUENCE, ROWID (prod_id, time_id, amount_sold),
COMMIT SCN INCLUDING NEW VALUES;
- We can specify that the MVIEW is refreshed every time a COMMIT is made (REFRESH FAST ON COMMIT)
CREATE MATERIALIZED VIEW sales_mv_current REFRESH FAST ON COMMIT AS
SELECT t.calendar_year, p.prod_id, SUM(s.amount_sold) AS sum_sales
FROM times t, products p, sales s
WHERE t.time_id = s.time_id
AND p.prod_id = s.prod_id
GROUP BY t.calendar_year, p.prod_id;
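- The endian format of the target platform is typically checked in V$TRANSPORTABLE_PLATFORM; a sketch of such a query (the exact statement is an assumption, the output follows below):
SELECT PLATFORM_NAME, ENDIAN_FORMAT
FROM   V$TRANSPORTABLE_PLATFORM
WHERE  PLATFORM_NAME = 'Microsoft Windows IA (32-bit)';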
PLATFORM_NAME                      ENDIAN_FORMAT
---------------------------------- -------------
Microsoft Windows IA (32-bit)      Little
EXECUTE DBMS_TTS.TRANSPORT_SET_CHECK('sales_1,sales_2', TRUE);
SELECT * FROM TRANSPORT_SET_VIOLATIONS;
VIOLATIONS
--------------------------------------------------------------------------
Constraint DEPT_FK between table JIM.EMP in tablespace SALES_1 and table
JIM.DEPT in tablespace OTHER
Partitioned table JIM.SALES is partially contained in the transportable set
SQL> ALTER TABLESPACE sales_1 READ ONLY;
Tablespace altered.
SQL> ALTER TABLESPACE sales_2 READ ONLY;
Tablespace altered.
SQL> HOST
$ expdp system dumpfile=expdat.dmp directory=data_pump_dir transport_tablespaces=sales_1,sales_2 logfile=tts_export.log
OR
$ expdp system dumpfile=expdat.dmp directory=data_pump_dir transport_tablespaces=sales_1,sales_2 transport_full_check=y logfile=tts_export.log
Output
------
******************************************************************************
Dump file set for SYSTEM.SYS_EXPORT_TRANSPORTABLE_01 is:
/u01/app/oracle/admin/salesdb/dpdump/expdat.dmp
******************************************************************************
Datafiles required for transportable tablespace SALES_1:
/u01/app/oracle/oradata/salesdb/sales_101.dbf
Datafiles required for transportable tablespace SALES_2:
/u01/app/oracle/oradata/salesdb/sales_201.dbf
Cross Platform conversion
-------------------------
RMAN> CONVERT TABLESPACE sales_1,sales_2
2> TO PLATFORM 'Microsoft Windows IA (32-bit)'
3> FORMAT '/tmp/%U';
Starting conversion at source at 30-SEP-08
using channel ORA_DISK_1
channel ORA_DISK_1: starting datafile conversion
input datafile file number=00007 name=/u01/app/oracle/oradata/salesdb/sales_101.dbf
converted datafile=/tmp/data_D-SALESDB_I-1192614013_TS-SALES_1_FNO-7_03jru08s
channel ORA_DISK_1: datafile conversion complete, elapsed time: 00:00:45
channel ORA_DISK_1: starting datafile conversion
input datafile file number=00008 name=/u01/app/oracle/oradata/salesdb/sales_201.dbf
converted datafile=/tmp/data_D-SALESDB_I-1192614013_TS-SALES_2_FNO-8_04jru0aa
channel ORA_DISK_1: datafile conversion complete, elapsed time: 00:00:25
- PARALLEL => Provides parallelism for statements against the external table if certain conditions are met
- REJECT LIMIT => Maximum number of rejected rows allowed (per PX server process)
- All the parameters of the ORACLE_LOADER access driver are documented at:
  http://docs.oracle.com/cd/E11882_01/server.112/e22490/et_params.htm#SUTIL012
CREATE TABLE admin_ext_employees
 (employee_id     NUMBER(4),
  first_name      VARCHAR2(20),
  last_name       VARCHAR2(25),
  job_id          VARCHAR2(10),
  manager_id      NUMBER(4),
  hire_date       DATE,
  salary          NUMBER(8,2),
  commission_pct  NUMBER(2,2),
  department_id   NUMBER(4),
  email           VARCHAR2(25)
 )
ORGANIZATION EXTERNAL
(
TYPE ORACLE_LOADER
DEFAULT DIRECTORY admin_dat_dir
ACCESS PARAMETERS
(
records delimited by newline
badfile admin_bad_dir:'empxt%a_%p.bad'
logfile admin_log_dir:'empxt%a_%p.log'
fields terminated by ','
missing field values are null
( employee_id, first_name, last_name, job_id, manager_id,
hire_date char date_format date mask "dd-mon-yyyy",
salary, commission_pct, department_id, email
)
)
LOCATION ('empxt1.dat', 'empxt2.dat')
)
PARALLEL
REJECT LIMIT UNLIMITED;
- We run a test query
SELECT * FROM ADMIN_EXT_EMPLOYEES;
- If we need to do an INSERT ... SELECT and there is a lot of data => enable PARALLEL DML (see the sketch below)
-- e.g. INSERT INTO EMPLOYEES (...) SELECT * FROM ADMIN_EXT_EMPLOYEES;
ALTER SESSION ENABLE PARALLEL DML;
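- A minimal sketch of the parallel load (the APPEND hint and degree 4 are illustrative assumptions; it assumes EMPLOYEES has matching columns):
INSERT /*+ APPEND PARALLEL(employees, 4) */ INTO employees
  (employee_id, first_name, last_name, job_id, manager_id,
   hire_date, salary, commission_pct, department_id, email)
SELECT employee_id, first_name, last_name, job_id, manager_id,
       hire_date, salary, commission_pct, department_id, email
FROM   admin_ext_employees;
COMMIT;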
Preprocessor
--------------
# Create a directory for storing the compressed files
mkdir -p /u01/stage/zdata
# We compress the two source files from the previous example
cp /u01/stage/data/*.dat /u01/stage/zdata
cd /u01/stage/zdata
gzip *.dat
# We also need to copy the "zcat" executable used to read the compressed files
cp /bin/zcat /u01/stage/zdata
- Create the DIRECTORY in Oracle
CREATE OR REPLACE DIRECTORY admin_zdat_dir AS '/u01/stage/zdata';
- In addition to READ, we need to grant EXECUTE on the directory so that "zcat" can be used
GRANT READ, EXECUTE ON DIRECTORY admin_zdat_dir TO HR;
- Create the table; the only difference is the PREPROCESSOR argument, which indicates that "zcat" is used
CREATE TABLE admin_ext_employees_gzip
 (employee_id     NUMBER(4),
  first_name      VARCHAR2(20),
  last_name       VARCHAR2(25),
  job_id          VARCHAR2(10),
  manager_id      NUMBER(4),
  hire_date       DATE,
  salary          NUMBER(8,2),
  commission_pct  NUMBER(2,2),
  department_id   NUMBER(4),
  email           VARCHAR2(25)
 )
ORGANIZATION EXTERNAL
(
TYPE ORACLE_LOADER
DEFAULT DIRECTORY admin_zdat_dir
ACCESS PARAMETERS
(
records delimited by newline
badfile admin_bad_dir:'empxt%a_%p.bad'
preprocessor admin_zdat_dir:'zcat'
logfile admin_log_dir:'empxt%a_%p.log'
fields terminated by ','
missing field values are null
( employee_id, first_name, last_name, job_id, manager_id,
hire_date char date_format date mask "dd-mon-yyyy",
salary, commission_pct, department_id, email
)
)
LOCATION ('empxt1.dat.gz', 'empxt2.dat.gz')
)
PARALLEL
REJECT LIMIT UNLIMITED;
Compression
--------------
- We see an example of using the COMPRESSION option of the ORACLE_DATAPUMP access driver
- We grant write permission on the ADMIN_DAT_DIR directory
GRANT WRITE ON DIRECTORY admin_dat_dir TO HR;
- We connect with the HR user
CONN HR/hr
- Create the external table with a CTAS, taking the data from another table
- COMPRESSION => enabled (COMPRESSION ENABLED)
CREATE TABLE admin_ext_employees_dump
ORGANIZATION EXTERNAL (TYPE ORACLE_DATAPUMP DEFAULT DIRECTORY admin_dat_dir
ACCESS PARAMETERS (COMPRESSION ENABLED) LOCATION ('emp.dmp'))
AS
SELECT * FROM EMPLOYEES;
- We can use the generated dump file to create a new external table
CREATE TABLE admin_ext_employees_dump2
 (
  EMPLOYEE_ID     NUMBER(6),
  FIRST_NAME      VARCHAR2(20),
  LAST_NAME       VARCHAR2(25),
  EMAIL           VARCHAR2(25),
  PHONE_NUMBER    VARCHAR2(20),
  HIRE_DATE       DATE,
  JOB_ID          VARCHAR2(10),
  SALARY          NUMBER(8,2),
  COMMISSION_PCT  NUMBER(2,2),
  MANAGER_ID      NUMBER(6),
  DEPARTMENT_ID   NUMBER(4)
 )
ORGANIZATION EXTERNAL
(
TYPE ORACLE_DATAPUMP
DEFAULT DIRECTORY admin_dat_dir
LOCATION ('emp.dmp')
);
Implement Data Pump Export and Import Jobs for Data Transfer
-------------------------------------------------------------
- We check the path pointed to by the DATA_PUMP_DIR DIRECTORY
SELECT DIRECTORY_PATH FROM DBA_DIRECTORIES WHERE DIRECTORY_NAME='DATA_PUMP_DIR';
expdp system SCHEMAS=HR DIRECTORY=DATA_PUMP_DIR DUMPFILE=exp_hr_20130613.dmp LOGFILE=exp_hr_20130613.log
expdp USERID=\"/ as sysdba\" FULL=Y DIRECTORY=DATA_PUMP_DIR COMPRESSION=ALL DUMPFILE=exp_full_20130613.dmp
# Let's see how we can use a file for all parameters of the Export
vi /u01/stage/expdp_parfile.par
# Add the following lines
CONTENT=METADATA_ONLY
TABLES=HR.EMPLOYEES,HR.DEPARTMENTS
EXCLUDE=STATISTICS
DIRECTORY=DATA_PUMP_DIR
DUMPFILE=exp_employees_20130613.dmp
LOGFILE=exp_employees_20130613.log
# Run the export file with input parameters
# CONTENT=METADATA_ONLY => Only export the metadata (table definitions), not the rows
# EXCLUDE=STATISTICS => We decide that we do not want to export the statistics of these objects
expdp system PARFILE=/u01/stage/expdp_parfile.par
- Create a DIRECTORY of an alternative location
CREATE DIRECTORY TEMP_DIR AS '/u01/stage';
# We launch an Export of the SH schema with 8 parallel processes
# We see that the following files are generated
# /u01/app/oracle/admin/OCM/dpdump/exp_sales_01_20130613.dmp
# /u01/stage/exp_sales_01_20130613.dmp
# /u01/app/oracle/admin/OCM/dpdump/exp_sales_02_20130613.dmp
# /u01/stage/exp_sales_02_20130613.dmp
# /u01/app/oracle/admin/OCM/dpdump/exp_sales_03_20130613.dmp
# /u01/stage/exp_sales_03_20130613.dmp
# /u01/app/oracle/admin/OCM/dpdump/exp_sales_04_20130613.dmp
expdp system PARALLEL=8 SCHEMAS=SH \
  DUMPFILE=DATA_PUMP_DIR:exp_sales_%U_20130613.dmp,TEMP_DIR:exp_sales_%U_20130613.dmp \
  LOGFILE=DATA_PUMP_DIR:exp_sales_20130613.log
# Activate the FLASHBACK_TIME parameter for an Export
# To do this, the HR user must first be granted READ and WRITE on the DIRECTORY used for the export
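# A minimal sketch of such an export using a parameter file (file names and the timestamp are illustrative assumptions)
# /u01/stage/expdp_flashback.par:
SCHEMAS=HR
DIRECTORY=DATA_PUMP_DIR
DUMPFILE=exp_hr_flashback.dmp
LOGFILE=exp_hr_flashback.log
FLASHBACK_TIME="TO_TIMESTAMP('13-06-2013 09:00:00', 'DD-MM-YYYY HH24:MI:SS')"

expdp hr PARFILE=/u01/stage/expdp_flashback.par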
end;
/
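- The result queried below comes from an I/O calibration run with DBMS_RESOURCE_MANAGER.CALIBRATE_IO; a sketch of such a block (the disk count and latency values are assumptions):
SET SERVEROUTPUT ON
DECLARE
  l_max_iops PLS_INTEGER;
  l_max_mbps PLS_INTEGER;
  l_latency  PLS_INTEGER;
BEGIN
  DBMS_RESOURCE_MANAGER.CALIBRATE_IO(
    num_physical_disks => 4,    -- assumed number of physical disks
    max_latency        => 10,   -- assumed maximum tolerated latency in ms
    max_iops           => l_max_iops,
    max_mbps           => l_max_mbps,
    actual_latency     => l_latency);
  DBMS_OUTPUT.PUT_LINE('max_iops = ' || l_max_iops);
  DBMS_OUTPUT.PUT_LINE('max_mbps = ' || l_max_mbps);
  DBMS_OUTPUT.PUT_LINE('latency  = ' || l_latency);
END;
/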
- We see the result
SELECT * FROM DBA_RSRC_IO_CALIBRATE;
- We check whether Oracle applies Parallel Execution (Auto DOP)
EXPLAIN PLAN FOR
SELECT customers.cust_first_name, customers.cust_last_name,
MAX(QUANTITY_SOLD), AVG(QUANTITY_SOLD)
FROM sh.sales, sh.customers
WHERE sales.cust_id=customers.cust_id
GROUP BY customers.cust_first_name, customers.cust_last_name;
- We will see that PX is not applied because the estimated runtime is below the minimum threshold
- A statement must take longer than PARALLEL_MIN_TIME_THRESHOLD seconds for PX to be applied
-- Note
-- -----
--    automatic DOP: Computed Degree of Parallelism is 1 because of parallel threshold
SELECT PLAN_TABLE_OUTPUT FROM TABLE(DBMS_XPLAN.DISPLAY);
- Set the PARALLEL_DEGREE_POLICY parameter to AUTO to enable Auto DOP (see the sketch below)
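- A minimal sketch of enabling Auto DOP at the system level (the 5-second threshold is an illustrative assumption; the default is AUTO, about 10 seconds):
ALTER SYSTEM SET PARALLEL_DEGREE_POLICY = AUTO;
-- Optionally lower the threshold so that shorter statements also qualify for PX
ALTER SYSTEM SET PARALLEL_MIN_TIME_THRESHOLD = 5;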
- Change the default DEGREE on the objects for which we want to use PX
ALTER TABLE SH.SALES PARALLEL 4;
ALTER TABLE SH.SALES PARALLEL (DEGREE DEFAULT );
SELECT /*+ PARALLEL(SALES,4) */ SUM(AMOUNT_SOLD) FROM SH.SALES;
ALTER SESSION ENABLE PARALLEL QUERY;
ALTER SESSION FORCE PARALLEL QUERY PARALLEL 5;
ALTER SESSION ENABLE PARALLEL DML;
ALTER SESSION ENABLE PARALLEL DDL;