top of page

Search results

  • SQL Project - Supermarket Billing

    SQL Project - Supermarket Billing The goal of the project is to create a database for a supermarket to efficiently manage inventory, sales, and customer information. 1. Products Table creation and insert sample data: Table Creation: CREATE TABLE mkt_Products ( ProductID INT PRIMARY KEY, ProductName VARCHAR(50), Category VARCHAR(50), Price DECIMAL(10, 2), StockQuantity INT ); Insert Data: INSERT ALL INTO mkt_Products VALUES (1, 'Laptop', 'Electronics', 800.00, 50) INTO mkt_Products VALUES (2, 'Smartphone', 'Electronics', 500.00, 100) INTO mkt_Products VALUES (3, 'Desk Chair', 'Furniture', 120.00, 25) INTO mkt_Products VALUES (4, 'Coffee Maker', 'Appliances', 40.00, 30) INTO mkt_Products VALUES (5, 'Running Shoes', 'Apparel', 80.00, 50) INTO mkt_Products VALUES (6, 'Bookshelf', 'Furniture', 150.00, 20) INTO mkt_Products VALUES (7, 'Backpack', 'Accessories', 30.00, 40) INTO mkt_Products VALUES (8, 'Microwave', 'Appliances', 70.00, 15) INTO mkt_Products VALUES (9, 'Office Desk', 'Furniture', 200.00, 10) INTO mkt_Products VALUES (10, 'T-shirt', 'Apparel', 15.00, 75) SELECT * FROM dual; 2. 
Clients Table creation and insert sample data: Table Creation: CREATE TABLE mkt_Clients ( ClientID INT PRIMARY KEY, FirstName VARCHAR(50), LastName VARCHAR(50), Email VARCHAR(100), Phone VARCHAR(15) ); Insert Data: INSERT ALL INTO mkt_Clients VALUES (1, 'John', 'Doe', 'john.doe@example.com', '555-1234') INTO mkt_Clients VALUES (2, 'Jane', 'Smith', 'jane.smith@example.com', '555-5678') INTO mkt_Clients VALUES (3, 'Robert', 'Johnson', 'robert.j@example.com', '555-9876') INTO mkt_Clients VALUES (4, 'Emily', 'Davis', 'emily.davis@example.com', '555-4321') INTO mkt_Clients VALUES (5, 'Michael', 'Wilson', 'michael.w@example.com', '555-8765') INTO mkt_Clients VALUES (6, 'Lisa', 'Miller', 'lisa.miller@example.com', '555-2345') INTO mkt_Clients VALUES (7, 'David', 'Brown', 'david.brown@example.com', '555-6789') INTO mkt_Clients VALUES (8, 'Sarah', 'Turner', 'sarah.turner@example.com', '555-3456') INTO mkt_Clients VALUES (9, 'Kevin', 'Harris', 'kevin.harris@example.com', '555-7890') INTO mkt_Clients VALUES (10, 'Emma', 'Taylor', 'emma.t@example.com', '555-4567') SELECT * FROM dual; 3. 
Sales Table creation and insert sample data: Table Creation: CREATE TABLE mkt_Sales ( SaleID INT PRIMARY KEY, ProductID INT, ClientID INT, SaleDate DATE, Quantity INT, TotalAmount DECIMAL(10, 2), FOREIGN KEY (ProductID) REFERENCES mkt_Products(ProductID), FOREIGN KEY (ClientID) REFERENCES mkt_Clients(ClientID) ); Insert Data: INSERT ALL INTO mkt_Sales VALUES (1, 1, 1, TO_DATE('2024-01-01', 'YYYY-MM-DD'), 2, 1600.00) INTO mkt_Sales VALUES (2, 3, 2, TO_DATE('2024-01-03', 'YYYY-MM-DD'), 1, 120.00) INTO mkt_Sales VALUES (3, 7, 4, TO_DATE('2024-01-05', 'YYYY-MM-DD'), 3, 90.00) INTO mkt_Sales VALUES (4, 5, 5, TO_DATE('2024-01-08', 'YYYY-MM-DD'), 5, 400.00) INTO mkt_Sales VALUES (5, 9, 3, TO_DATE('2024-01-10', 'YYYY-MM-DD'), 1, 200.00) INTO mkt_Sales VALUES (6, 2, 6, TO_DATE('2024-01-12', 'YYYY-MM-DD'), 2, 1000.00) INTO mkt_Sales VALUES (7, 4, 7, TO_DATE('2024-01-15', 'YYYY-MM-DD'), 4, 160.00) INTO mkt_Sales VALUES (8, 8, 9, TO_DATE('2024-01-18', 'YYYY-MM-DD'), 1, 70.00) INTO mkt_Sales VALUES (9, 6, 8, TO_DATE('2024-01-20', 'YYYY-MM-DD'), 3, 450.00) INTO mkt_Sales VALUES (10, 10, 10, TO_DATE('2024-01-22', 'YYYY-MM-DD'), 2, 30.00) SELECT * FROM dual; 4. 
Suppliers Table creation and insert sample data: Table Creation: CREATE TABLE mkt_Suppliers ( SupplierID INT PRIMARY KEY, SupplierName VARCHAR(50), ContactPerson VARCHAR(50), Email VARCHAR(100), Phone VARCHAR(15), FOREIGN KEY (SupplierID) REFERENCES mkt_Sales(SaleID) ); Insert Data: INSERT ALL INTO mkt_Suppliers VALUES (1, 'Tech Distributors', 'Mark Johnson', 'info@techdist.com', '555-1111') INTO mkt_Suppliers VALUES (2, 'Furniture Mart', 'Sarah Smith', 'sales@furnituremart.com', '555-2222') INTO mkt_Suppliers VALUES (3, 'Appliance World', 'David Brown', 'support@applianceworld.com', '555-3333') INTO mkt_Suppliers VALUES (4, 'Shoe Wholesalers', 'Emily Davis', 'sales@shoewholesale.com', '555-4444') INTO mkt_Suppliers VALUES (5, 'Book Suppliers', 'Michael Wilson', 'books@suppliers.com', '555-5555') INTO mkt_Suppliers VALUES (6, 'Office Solutions', 'Lisa Miller', 'info@officesolutions.com', '555-6666') INTO mkt_Suppliers VALUES (7, 'Accessory Hub', 'Robert Johnson', 'contact@accessoryhub.com', '555-7777') INTO mkt_Suppliers VALUES (8, 'Kitchen Essentials', 'Jane Smith', 'info@kitchenessentials.com', '555-8888') INTO mkt_Suppliers VALUES (9, 'Apparel World', 'Kevin Harris', 'info@apparelworld.com', '555-9999') INTO mkt_Suppliers VALUES (10, 'Sports Gear Inc.', 'Emma Taylor', 'sales@sportsgear.com', '555-0000') SELECT * FROM dual; 5. 
Employee Details creation and insert sample data: Table Creation: CREATE TABLE mkt_EmployeeDetails ( EmployeeID INT PRIMARY KEY, FirstName VARCHAR(50), LastName VARCHAR(50), Email VARCHAR(100), Phone VARCHAR(15) ); Insert Data: INSERT ALL INTO mkt_EmployeeDetails VALUES (1, 'Adam', 'Johnson', 'adam.j@example.com', '555-1234') INTO mkt_EmployeeDetails VALUES (2, 'Olivia', 'Brown', 'olivia.b@example.com', '555-5678') INTO mkt_EmployeeDetails VALUES (3, 'Liam', 'Miller', 'liam.m@example.com', '555-9876') INTO mkt_EmployeeDetails VALUES (4, 'Ava', 'Davis', 'ava.d@example.com', '555-4321') INTO mkt_EmployeeDetails VALUES (5, 'Noah', 'Wilson', 'noah.w@example.com', '555-8765') INTO mkt_EmployeeDetails VALUES (6, 'Sophia', 'Smith', 'sophia.s@example.com', '555-2345') INTO mkt_EmployeeDetails VALUES (7, 'Jackson', 'Turner', 'jackson.t@example.com', '555-6789') INTO mkt_EmployeeDetails VALUES (8, 'Emma', 'Harris', 'emma.h@example.com', '555-3456') INTO mkt_EmployeeDetails VALUES (9, 'Aiden', 'Taylor', 'aiden.t@example.com', '555-7890') INTO mkt_EmployeeDetails VALUES (10, 'Grace', 'Jones', 'grace.j@example.com', '555-1111') SELECT * FROM dual; 6. 
Employees Table creation and insert sample data: Create mkt_Employees table with Employee_Position: CREATE TABLE mkt_Employees ( SaleID INT PRIMARY KEY, ProductID INT, ClientID INT, EmployeeID INT, SaleDate DATE, Quantity INT, Employee_Position VARCHAR(50), -- Add this column FOREIGN KEY (EmployeeID) REFERENCES mkt_EmployeeDetails(EmployeeID) ); Insert data into mkt_Employees from mkt_EmployeeDetails, mkt_Products, mkt_Clients, and mkt_Sales: INSERT INTO mkt_Employees (SaleID, ProductID, ClientID, EmployeeID, SaleDate, Quantity) SELECT s.SaleID, p.ProductID, c.ClientID, ed.EmployeeID, s.SaleDate, s.Quantity FROM mkt_Sales s JOIN mkt_Products p ON s.ProductID = p.ProductID JOIN mkt_Clients c ON s.ClientID = c.ClientID JOIN mkt_EmployeeDetails ed ON s.ClientID = ed.EmployeeID; Update the job positions in mkt_Employees: UPDATE mkt_Employees SET Employee_Position = ( CASE WHEN EmployeeID = 1 THEN 'Sales Associate' WHEN EmployeeID = 2 THEN 'Manager' WHEN EmployeeID = 3 THEN 'Customer Support' WHEN EmployeeID = 4 THEN 'Warehouse Clerk' WHEN EmployeeID = 5 THEN 'IT Specialist' WHEN EmployeeID = 6 THEN 'Marketing' WHEN EmployeeID = 7 THEN 'HR Manager' WHEN EmployeeID = 8 THEN 'Finance' WHEN EmployeeID = 9 THEN 'Logistics' WHEN EmployeeID = 10 THEN 'Research Analyst' ELSE NULL END ); Write Queries: To retrieve product details, customer information, and sales records. To generate sales details based on the product. To find remaining stock. Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Projects | DBA Genesis Support

    Projects SQL Project - Supermarket Billing The goal of the project is to create a database for a supermarket to efficiently manage inventory, sales, and customer information. SQL Project - Library Management The goal of the project is to create a database for a library management system that allows users to efficiently manage customer information, book inventory & book borrowings. Linux Project - Monitor Server Disk Space In this Linux project we will write a shell script that will monitor the disk space and put the info in a file every 20 minutes. Setting up...

  • Schedule RMAN Backup on Windows

    Schedule RMAN Backup on Windows Scheduling RMAN backups on a windows machine is a little different than on a Linux machine. Windows uses batch scripts and the Windows Task Scheduler to automate RMAN backups. Create RMAN Backup Script File Create Batch Script Add Schedule to Windows Scheduler Create RMAN Backup Script File We will create a backup.cmd file with our RMAN run block for database backup run{ crosscheck backup; backup database plus archivelog; }; Create Batch Script Create a proddb_rman_backup.bat (.bat is a Windows batch script) file which will call above file set oracle_sid=proddb rman target sys/sys@ecccatalog rman_rc/rman_rc@rcat cmdfile='F:\backup.cmd' log='F:\rman_backup_full\backup.log' Add Schedule to Windows Scheduler Schedule the above .bat script in windows scheduler Launch the Task Scheduler from start menu On the right-hand panel, click on Create Task Give a name to your task Click on Triggers tab >> New and define the schedule Make sure you set proper frequency like daily or weekly and set proper start date and time. Click on Actions tab >> New and provide the location of .bat file you created Click on OK and task will be created. You can even run the task for testing by right clicking on the task name and select Run Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Install Neo4j on Docker

    Install Neo4j on Docker On Windows platform: Install Docker by going to the website https://docs.docker.com/desktop/windows/install/ and downloading the docker file. Note: A 64-bit processor and 4GB system RAM are the hardware prerequisites required to successfully run Docker on Windows 10. Double-click on the Docker Desktop Installer.exe to run the installer. Once you start the installation process, always enable Hyper-V Windows Feature on the Configuration page and follow the installation process to allow the installer and wait till the process is done. After completion of the installation process, click Close and restart your PC. Sign In the Docker https://hub.docker.com/ search for the NEO4j Open Command Prompt as Administrative Rights run the below command to pull the official image of Neo4j docker pull neo4j After completion To verify open the Docker and click the Images menu. To Run open the command prompt and run the following command. docker run -it --rm --publish=7474:7474 --publish=7687:7687 -e NEO4J_dbms_connector_https_advertised__address=":7473" -e NEO4J_dbms_connector_http_advertised__address=":7474" -e NEO4J_dbms_connector_bolt_advertised__address=":7687" --env=NEO4J_AUTH=none neo4j open the link http://localhost:7474/ in the browser and set name and password. Now you are connected to database: open terminal and test cypher command neo4j@neo4j> match (n) return count(n); On Linux platform: Installing Docker on Oracle Linux 8/7 Before we begin, run the system update command to rebuild the repo cache and update installed packages. 
sudo yum update Now Install some of the default packages sudo yum install -y yum-utils Add Docker repository to Oracle Linux sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo Add this below entry to the /etc/yum.repos.d/docker-ce.repo file [centos-extras] name=Centos extras - $basearch baseurl=http://mirror.centos.org/centos/7/extras/x86_64 enabled=1 gpgcheck=1 gpgkey=http://centos.org/keys/RPM-GPG-KEY-CentOS-7 Now run the below command to install the extra plugin required by the docker yum -y install slirp4netns fuse-overlayfs container-selinux Lastly, to install Docker Engine, command line and contained (a standalone high-level container runtime) run, finally, this command. sudo yum install docker-ce docker-ce-cli containerd.io Enable and start the Docker service sudo systemctl start docker sudo systemctl enable docker Add User to Docker group By default, docker needs the root access to run commands, Therefore, we will add our current or the user that you want to use to access docker to the docker group. sudo groupadd docker sudo usermod -aG docker your_user Now, we have finally Installed Docker in Oracle Linux. Our Next step is to PULL the Neo4j Image. docker pull neo4j After, Image is downloaded in the docker, we are ready to run it. To verify it, we can use web browser to and open url localhost:7474 Cypher Shell If you want to run cypher shell, Run the below command docker exec -it testneo4j bash cypher-shell -u neo4j -p test Thank you. Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Oracle ADDM Report

    Oracle ADDM Report It analyzes the AWR report to identify potential performance bottlenecks. For each identified issue, it locates the root cause and provides recommendations. It is created and stored every time an AWR report is run, provided the STATISTICS_LEVEL parameter is set to TYPICAL or ALL. To create ADDM Report @$ORACLE_HOME/rdbms/admin/addmrpt.sql Sample output of ADDM report: FINDING: 59% impact (944 seconds) The buffer cache was undersized causing significant additional read I/O. RECOMMENDATION 1: DB Configuration, 59% benefit (944 seconds) ACTION: Increase SGA target size by increasing the value of parameter “sga_target” by 28 M. SYMPTOMS THAT LED TO THE FINDING: Wait class User I/O was consuming significant database time. (83% impact [1336 seconds]) Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Upgrade Oracle Golden Gate 12cR1 to 12cR2

    Upgrade Oracle Golden Gate 12cR1 to 12cR2 In this article, we will look at Oracle Golden Gate upgrade from 12.1 to 12.2 on a Linux server. Make sure you Allow golden gate processes to finish processing all current DML and DDL data in GG trail files. Golden Gate Upgrade Prerequisite Perform Golden Gate upgrade If upgrading DB along with GG, upgrade DB first Golden Gate Upgrade Prerequisite Stop user activity on source objects which are involved in replication Stop Extract GGSCI> lag ext1 GGSCI> stop ext1 Make sure replicat applies all transactions on target GGSCI> lag rep1 Stop pump and Replicat GGSCI> stop dp1 GGSCI> stop rep1 Stop Manager on both source and target GGSCI> stop mgr Take backup of GG_HOME on both source and target If you want to upgrade source and target DBs, this is the time. Once DBs are upgraded still, make sure there is no activity on source objects involved in the replication Perform Golden Gate upgrade Download the latest 12.2.0.2 Golden Gate and copy the zip file to both source and target DBs Unzip and start the runInstaller Give same old GG_HOME location while installation – Do not worry, this option will auto-upgrade old GG Do not start manager Perform installation on both source and target DBs in the same old GG_HOME location Run @ulg script for supplemental log upgrade from GG_HOME cd $GG_HOME Sqlplus / as sysdba SQL> @ulg.sql  press enter when prompted Rollover Extract, Pump, and Replicat to next trail sequence number On source DB: ============= alter extract ext1 etrollover alter extract dp1 etrollover info ext1 detail ALTER EXTRACT dp1, EXTSEQNO , EXTRBA 0 info dp1 detail On target DB: ============= alter replicat rep1, EXTSEQNO , EXTRBA 0 Start Manager, Extract, Pump, and Replicat This is an oracle known error. You get prvtlmpg.plb script with GG binaries. This resides under $GG_HOME. 
Run this script on both source and target database cd $GG_HOME Sqlplus / as sysdba SQL> @prvtlmpg.plb --> press enter when prompted Allow activity on source objects involved in the replication. Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Send Email From Oracle Linux Server

    Send Email From Oracle Linux Server Getting an email alert from your Linux server is very important when it comes to notifying administrators. Every time a script is executed, it's a great idea to configure your Linux server to send emails. Let's look at the different ways in which you can configure email alerts using SMTP via Gmail. Gmail Pre-Requisites for SMTP Configure Mail on Linux Note: as we don't have a mail server configured on the Linux server, we will use gmail to send emails to the recipient Gmail Pre-Requisites for SMTP We are basically using a gmail account (email sender) which will send emails to recipients. For gmail to send emails, we need to make some changes. Login to your gmail https://myaccount.google.com/u/6/security and enable 2-factor authentication, if not already done Once done, we will now generate an app password so that we do not have to put our gmail password on Linux server mail configuration files Select app as Mail Select device as Other (Custom name), type postfix and click on Generate Your alternate password is ready, you will be using this password going forward to setup email sending from the Linux server Our gmail account is ready to be used as email sender! Configure Mail on Linux There are two methods you can use to configure email on Linux server Method 1: Send Email Using Postfix (Quick!) 
Method 2: Send Email Using Mailx Use any one method to configure mail on Linux, not both Method 1: Send Email Using Postfix Install postfix and other dependent packages yum install -y postfix mailx cyrus-sasl cyrus-sasl-plain Start postfix systemctl enable postfix systemctl start postfix Replace sender email and app password before executing below echo 'smtp.gmail.com : ' > /etc/postfix/sasl_passwd Set the permissions and load the sasl_passwd file chmod 600 /etc/postfix/sasl_passwd postmap hash:/etc/postfix/sasl_passwd Open postfix configuration file and paste below at the end of the file on Oracle Linux If you using gmail to send emails, just copy paste below vi /etc/postfix/main.cf #SMTP Gmail Relay for Oracle Linux smtp_sasl_auth_enable = yes smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd smtp_sasl_security_options = noanonymous smtp_tls_security_level = secure smtp_tls_mandatory_protocols = TLSv1 smtp_tls_mandatory_ciphers = high smtp_tls_secure_cert_match = nexthop smtp_tls_CAfile = /etc/pki/tls/certs/ca-bundle.crt relayhost = smtp.gmail.com:587 Only for CentOS , comment smtp_tls_security_level parameter, else email won't work #smtp_tls_security_level = secure Restart postfix systemctl restart postfix Send test email echo "Hello" | mail -s "test" Verify via below command if email is sent or stuck due to some error mailq Method 2: Send Email using Mailx This method of Linux mail setup does not work with CentOS Install Mailx package yum install -y mailx Edit /etc/mail.rc file and put below SMTP configuration at the end vi /etc/mail.rc #smtp settings set smtp=smtp://smtp.gmail.com:587 set smtp-auth=login set smtp-auth-user= set smtp-auth-password= set smtp-use-starttls set nss-config-dir=/etc/mail_certs set ssl-verify=ignore We need to setup SSL certificate so that gmail knows that emails are being sent from a trusted source. 
Create a certificate directory then create new certificate and key databases mkdir /etc/mail_certs certutil -N -d /etc/mail_certs --> give password Copy the cert chain for smtp.google.com:465 over to my_certs file ( CTRL + C to end ) openssl s_client -showcerts -connect smtp.gmail.com:465 > /etc/mail_certs/my_certs Open my_certs file and you will see three certs starting with --BEGIN CERTIFICATE-- and --END CERTIFICATE-- cat /etc/mail_certs/my_certs Copy the google cert (usually the first one) into a new file vi /etc/mail_certs/google --> save & close Copy the geotrust cert (usually the second one) into a new file vi /etc/mail_certs/geotrust --> save & close Copy the equifax cert (usually the third one) into a new file vi /etc/mail_certs/equifax --> save & close Start importing the google cert certutil -A -n "Google Internet Authority" -t "TC,," -d /etc/mail_certs -i /etc/mail_certs/google Start importing the geotrust cert certutil -A -n "GeoTrust Global CA" -t "TC,," -d /etc/mail_certs -i /etc/mail_certs/geotrust Start importing the equifax cert certutil -A -n "Equifax Secure Certificate Authority" -t "TCP,," -d /etc/mail_certs -i /etc/mail_certs/equifax Verify if certs are imported properly certutil -L -d /etc/mail_certs The next step is crucial as this will allow other users on Linux server to send email Give permissions on mail_certs so that other users on Linux server can send emails chmod -R 755 /etc/mail_certs/* Send test email echo "Your message" | mail -s "Subject" Enjoy! Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Read User Input in Linux Shell Script

    Read User Input in Linux Shell Script When you make shell scripts interactive, it gives a personal touch for the end user. Accepting users input with shell scripts makes users feel in control. User Input with Read Command Linux Read Command With Echo User Input with Read Command Use Linux read command to print a message and store user input into a variable. The below command prints (-p) a message and waits for user input. Anything that users types and hits enter, gets stored in fname variable read -p "Enter First Name: " fname echo "First Name is $fname " The above command will infinitely wait for user to enter an input. You can timeout read command after few seconds with -t option read -t 10 -p "Enter Last Name: " lname To read passwords from user without showing it on screen read -s -p "Password: " password echo "Your password is $password" Linux Read Command With Echo You can use echo command to print message on screen and then use read command echo "Enter Full Name: " read fullname echo "Full Name is $fullname" My preferred method is the first one as read command allows you to print (-p) and read user input into a variable in one single command! 😇😇😇 Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Add Linux Target to Oracle Enterprise Manager Cloud Control 13c

    Add Linux Target to Oracle Enterprise Manager Cloud Control 13c This article list out steps to add Linux (OEL 7) as a monitoring target to Oracle Enterprise Manager Cloud Control 13c. Note, the steps are same for 12c OEM as well.This method of agent deployment is also known as Push Method Deployment . Install Oracle Linux 7 Install Oracle 12cR2 Create Database Create Listener Add Linux Target via OEM Monitor Database, Listener Removing a Target Also Read: Setup and install Oracle Enterprise Manager 13c Cloud Control on Linux. Install Oracle Linux 7 For this demonstration purpose, I have used virtualbox VM with 4 GB RAM and 120 GB hard disk. I have setup the virtual machine with exact same steps described in the following article. I have set hostname as “dev.dbagenesis.com” for this linux target while installing OEL 7. Install Oracle Linux on VirtualBox (OEL 7.7) Install Oracle 12cR2 Add OEM server details to /etc/hosts file and below is how target server /etc/hosts file looks like 192.168.1.171 oem.dbagenesis.com oem 192.168.1.172 dev.dbagenesis.com dev As this VM is going to act as OEM target, I am going to install Oracle 12cR2 and also create a database. This will allow us to monitor this new database from OEM console. 
yum -y install oracle-database-server-12cR2-preinstall Install below packages as they are required by OEM to install Agent on this target machine yum install make -y yum install binutils -y yum install gcc -y yum install libaio -y yum install glibc-common -y yum install libstdc++ -y yum install sysstat -y yum install glibc -y yum install glibc-devel.i686 -y yum install glibc-devel -y yum install libXtst -y Set password for Oracle user passwd oracle Create directories which will hold Oracle software and agent software installation mkdir /u01 mkdir -p /u01/app/oracle/agent chown -R oracle:oinstall /u01 chmod -R 775 /u01 Copy the 12cR2 software files under /u01 and unzip it su - oracle cd /u01 unzip linuxx64_12201_database.zip You can choose to install the Oracle software using GUI method but I will be going with silent mode as it is quick and straightforward . vi /tmp/12cR2_response.rsp oracle.install.responseFileVersion=/oracle/install/rspfmt_dbinstall_response_schema_v12.2.0 oracle.install.option=INSTALL_DB_SWONLY ORACLE_HOSTNAME=dev.dbagenesis.com UNIX_GROUP_NAME=oinstall INVENTORY_LOCATION=/u01/app/oraInventory SELECTED_LANGUAGES=en ORACLE_HOME=/u01/app/oracle/product/12.2.0.1 ORACLE_BASE=/u01/app/oracle oracle.install.db.InstallEdition=EE oracle.install.db.OSDBA_GROUP=dba oracle.install.db.OSOPER_GROUP=dba oracle.install.db.OSBACKUPDBA_GROUP=dba oracle.install.db.OSDGDBA_GROUP=dba oracle.install.db.OSKMDBA_GROUP=dba oracle.install.db.OSRACDBA_GROUP=dba SECURITY_UPDATES_VIA_MYORACLESUPPORT=false DECLINE_SECURITY_UPDATES=true oracle.installer.autoupdates.option=SKIP_UPDATES Run the installer in silent mode to start the Oracle software installation ./runInstaller -ignoreSysPrereqs -showProgress -silent -responseFile /tmp/12cR2_response.rsp Set the Oracle user bash profile # .bash_profile # Get the aliases and functions if [ -f ~/.bashrc ]; then . 
~/.bashrc fi # User specific environment and startup programs export ORACLE_HOME=/u01/app/oracle/product/12.2.0.1 export ORACLE_SID=devdb export AGENT_HOME=/u01/app/oracle/agent/agent_inst PATH=$PATH:$HOME/.local/bin:$ORACLE_HOME/bin export PATH Don’t forget to execute the bash profile . .bash_profile Create Database Let's create devdb database on this VM. I am using dbca silent mode for quick db creation dbca -silent -createDatabase \ -templateName General_Purpose.dbc \ -gdbName devdb \ -sid devdb \ -createAsContainerDatabase false \ -emConfiguration NONE \ -datafileDestination /u01/app/db_files \ -storageType FS \ -characterSet AL32UTF8 \ -totalMemory 2048 \ -recoveryAreaDestination /u01/app/FRA Create Listener Fire NETMGR utility and create database listener and tns entries. Add Linux Target via OEM Before we can add the Linux target, we must add the target details to /etc/hosts file on OEM server 192.168.1.171 oem.dbagenesis.com oem 192.168.1.172 dev.dbagenesis.com dev Login to OEM console via web browser and let us start adding the target On top right side, click on Setup >> Add Target >> Add Target Manually Under Add Host Targets >> click on Install Agent on Host Click on + Add option, give hostname and select platform. Click on Next Provide the agent installation directory /u01/app/oracle/agent . Click on Named Credential and give the oracle user id and password of the target system. Click on Root Credential and give root user id and password of the target system. Make sure to delete anything that you see under Privileged Delegation Setting Note: Named credential is the Oracle user ID & password of the target server with which OEM will be able to install agent software. Root credential is the root user id & password of the target server with witch OEM will be able to execute root scripts. 
Click Next, review the agent installation, and then click on Deploy Agent Once the installation is done, you can click on Targets >> Hosts to check the newly added host Monitor Database, Listener Once the agent software is installed, it will not automatically start monitoring the database, listener, asm etc. You need to set it up to do it for you. Navigate to Setup >> Add Target >> Add Targets Manually. Click on Add target using guided process Select the below option and click on Add Under specify host, click on Search Icon and select the target host where DB & listener resides. Then click on Next OEM will discover the database on target system and list it. Select the database, give DBSNMP password (default is Oracle). Then click on test connection. If connection is successful, click ok then click on Next Check the review screen and click on Save Done, now your Database and listeners are also added to monitoring. You can check the by going to Targets >> Databases. Removing a Target To remove a target and all its monitoring from OEM, first bring down the target agent software. We can do this via OEM, under Targets >> All Targets search for the agent >> right click >> control >> shut down OEM will prompt you to provide credentials to login to target server so that it can shutdown the agent. We must provide the Named credentials of the Dev server and then click on Shutdown Wait until the agent software is shutdown. Once again Right Click agent >> Target Setup >> Agent Decommission Click on continue to start removing the agent from the target server OEM will list out all the targets (host, database, listener) that are being monitored by it and once you de-commission the agent, OEM will stop monitoring those targets. Click on Continue Done, the agent and all target monitoring are removed from the OEM. 
Next, you will have to manually delete the agent software from the target server On target system as oracle user =============================== cd /u01/app/oracle/agent rm -rf * Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Oracle Golden Gate Interview Questions

    Oracle Golden Gate Interview Questions Here are some of the frequently asked Oracle Golden Gate interview questions and answers. Enjoy! Q. What is the significance of Oracle GoldenGate Manager? To give users control over Oracle GoldenGate processes, Manager provides a command line interface to perform a variety of administrative, housekeeping, and reporting activities, including Setting parameters to configure and fine-tune Oracle GoldenGate processes Starting, stopping, and monitoring capture and delivery modules Critical, informational event, and threshold reporting Resource management Trail File management Q. Why it is highly desirable that tables that you want to replicate should have primary key? In simple words, to uniquely identify a record GoldenGate requires a primary key. If the primary key does not exist on the source table, GoldenGate will create its own unique identifier by concatenating all the table columns together. This will certainly prove inefficient as volume of data that needs to be extracted from the redo logs will increase exponentially. In normal scenario, when a table has primary key, GoldenGate process will fetch only the primary key and the changed data (before and after images in the case of an update statement). GoldenGate process will also warn you that primary key does not exist on the target table and you may receive the following warning in the GoldenGate error log WARNING OGG-xxxx No unique key is defined for table ‘TARGET_TABLE_NAME’. All viable columns will be used to represent the key, but may not guarantee uniqueness. KEYCOLS may be used to define the key Having primary key also insure fast data lookup when the Replicat recreates and applies the DML statements against the target database. But keep in mind that it is not “mandatory” that primary key must be present for the table. Q. Is it MUST that the source database should be in archivelog mode? 
It is NOT must that the source database is in the archivelog mode but for any serious, mission-critical GoldenGate system it is almost mandatory to have source system in Archive Log mode. Q. Without going into details, explain high level steps of setting up GoldenGate. Below are the key steps to install and configure the Golden Gate Download the software from the Oracle website and upload to server Unpack/Unzip the installation zip file Prepare source and target system Install the software on the source and target system (for 12c use OUI) Prepare the source database (some DB parameters need to be adjusted) Configure the Manager process on the source and target system Configure the Extract process on the source system Configure the data pump process on the source system Configure the Replicat process on the target system Start the Extract process Start the data pump process Start the Replicat process Q. When creating GoldenGate database user for database 12c, what special precaution you need to take? You must grant the GoldenGate admin user access to all database containers on the source side so that GoldenGate can access the redo logs for all the databases (container and pluggable) You must also grant the DBA role with the container=all option. SQL> GRANT DBA TO C##GOLDENADMIN CONTAINER=ALL Q. What is Downstream capture mode of GoldenGate? Traditionally log mining work for the source data happens on Source database side but in Downstream capture mode Oracle Data Guard redo transport mechanism is used. This enables continuous log shipping to the target database’s standby redo logs in real time. Log mining work to fetch DDL/DML transactions happens on the target side. Q. How do you take backup of GoldenGate? Your source/database you can backup easily using backup tools like Oracle Recovery Manager (RMAN) but to backup the GoldenGate you will need to back up the GoldenGate home and subdirectories that contain the trail files, checkpoint files etc. 
Without these key files, GoldenGate will not be able to recover from the last checkpoint. It means that if somehow you lose all these key GoldenGate files then you will have no option but to go for a new initial load. RMAN simply does not have the capability to back up the OS or non-database files. So either you keep all your GoldenGate related files on some kind of SAN setup which gets backed up daily at storage level or use Unix shell commands etc in a cron job to take filesystem backups. Q. What is a checkpoint table? In which capture mode is it used: classic or integrated? Oracle GoldenGate extract and replicat processes perform checkpoint operations. Now in the event of some unexpected failure, the checkpoint file or database table ensures extract and replicat re-start from the point of failure and avoid re-capture and re-apply of transactions. So, the Checkpoint table enables the checkpoint to be included within Replicat’s transaction, ensuring complete recovery from all failure scenarios. You use the GGSCI add checkpoint table command to create the checkpoint table. The Checkpoint table is used for Classic capture/replicate mode. For Integrated mode, the Checkpoint table is not required and should not be created. Q. What transaction types does Golden Gate support for Replication? Goldengate supports both DML and DDL Replication from the source to target. Q. What are the supplemental logging pre-requisites? The following supplemental logging is required. Database supplemental logging Object level logging Q. Why is Supplemental logging required for Replication? Integrated Capture (IC) In the Integrated Capture mode, GoldenGate works directly with the database log mining server to receive the data changes in the form of logical change records (LCRs). IC mode does not require any special setup for the databases using ASM, transparent data encryption, or Oracle RAC. This feature is only available for Oracle databases in Version 11.2.0.3 or higher. 
It also supports various object types which were previously not supported by Classic Capture. This Capture mode supports extracting data from source databases using compression. Integrated Capture can be configured in an online or downstream mode. Q. List the minimum parameters that can be used to create the extract process? The following are the minimum required parameters which must be defined in the extract parameter file. EXTRACT NAME USERID EXTTRAIL TABLE Q. I want to configure multiple extracts to write to the same exttrail file? Is this possible? Only one Extract process can write to one exttrail at a time. So, you can’t configure multiple extracts to write to the same exttrail. Q. What type of Encryption is supported in Goldengate? Oracle Goldengate provides 3 types of Encryption. Data Encryption using Blowfish. Password Encryption. Network Encryption. Q. What are some of the key features of GoldenGate 12c? The following are some of the more interesting features of Oracle GoldenGate 12c: Support for Multitenant Database Coordinated Replicat Integrated Replicat Mode Use of Credential store Use of Wallet and master key Trigger-less DDL replication Automatically adjusts threads when RAC node failure/start Supports RAC PDML Distributed transaction RMAN Support for mined archive logs Q. If I have created a Replicat process in OGG 12c and forgot to specify the DISCARDFILE parameter. What will happen? Starting with OGG 12c, if you don’t specify a DISCARDFILE the OGG process now generates a discard file with default values whenever a process is started with the START command through GGSCI. Q. Is it possible to start OGG EXTRACT at a specific CSN? Yes, starting with OGG 12c you can now start Extract at a specific CSN in the transaction log or trail. Example: START EXTRACT fin ATCSN 12345 START EXTRACT finance AFTERCSN 67890 Q. List a few parameters which may help improve the replicat performance? 
Below are the parameters that can be used to improve the replicat performance: BATCHSQL GROUPTRANSOPS INSERTAPPEND Q. What are the most common reasons of an Extract process slowing down? Some of the possible reasons are Long running batch transactions on a table. Insufficient memory on the Extract side. Uncommitted, long running transactions can cause writing of a transaction to a temporary area (dirtmp) on disk. Once the transaction is committed it is read from the temporary location on the file system and converted to trail files. Slow or overburdened Network. Q. What are the most common reasons of the Replicat process slowing down? Some of the possible reasons are Large amount of transactions on a particular table. Blocking sessions on the destination database where non-Goldengate transactions are also taking place on the same table as the replicat processing. If using DBFS, writing & reading of trail files may be slow if SGA parameters are not tuned. For slow Replicats, latency may be due to missing indexes on target. Replicat having to process Update, delete of rows in very large tables. Q. My extract was running fine for a long time. All of a sudden it went down. I started the extract processes after 1 hour. What will happen to my committed transactions that occurred in the database during the last 1 hour? The OGG checkpoint provides fault tolerance and makes sure that each transaction marked as committed is captured, and captured only once. Even if the extract went down abnormally, when you start the process again it reads the checkpoint file to provide the read consistency and transaction recovery. Q. I have configured the Oracle GoldenGate integrated capture process using the default values. As the data load increases I see that the extract starts lagging behind by an hour (or more) and database performance degrades. How will you resolve this performance issue? 
When operating in integrated capture mode, you must make sure that you have assigned sufficient memory to STREAMS_POOL_SIZE. An undersized STREAMS_POOL_SIZE or limiting the streams pool to use a specific amount of memory can cause troubles. The best practice is to allocate STREAMS_POOL_SIZE at the instance level and allocate the MAX. SGA at GG process level as below SQL> alter system set STREAMS_POOL_SIZE=3G; TRANLOGOPTIONS INTEGRATEDPARAMS (MAX_SGA_SIZE 2048, PARALLELISM 4) Q. Why would you segregate the tables in a replication configuration? How would you do it? In OGG you can configure replication of the data at the schema level or at the table level using the TABLE parameter of extract and the MAP parameter of replicat. For replicating the entire database, you can list all the schemas in the database in the extract/replicat parameter file. Depending on the amount of redo generation, you can split the tables in a schema into multiple extracts and replicats to improve the performance of data replication. Alternatively, you can also group a set of tables in the configuration by the application functionality. Alternatively, you may need to move tables which have long running transactions to a separate extract process to eliminate lag on the other tables. Let’s say that you have a schema named SCOTT and it has 100 tables. Out of these hundred tables, 50 tables are heavily utilized by the application. To improve the overall replication performance, you create 3 extracts and 3 replicats as follows: Ext_1/Rep_1 –> 25 tables Ext_2/Rep_2 –> 25 tables Ext_3/Rep_3 –> 50 tables Ext_1/Rep_1 and Ext_2/Rep_2 contain 25 tables each which are heavily utilized or generate more redo. Ext_3/Rep_3 contains all the other 50 tables which are least used. Q. How do you view the data which has been extracted from the redo logs? The logdump utility is used to open the trail files and look at the actual records that have been extracted from the redo or the archive log files. Q. 
Why should I upgrade my GoldenGate Extract processes to Integrated Extract? Oracle is able to provide faster integration of the new database features by moving the GoldenGate Extraction processes into the database. Due to this, the GoldenGate Integrated Extract has a number of features like Compression which are not supported in the traditional Extract. You can read more about how to upgrade to Integrated Extract and more about Integrated Delivery. Going forward, preference should be given to creating new extracts as Integrated Extracts and also to upgrading existing traditional Extracts. Q. What is the minimum Database version which supports Integrated Delivery? Oracle 11.2.0.4 is the minimum required database version that supports both Integrated Extract and Integrated Replicat. Q. What databases support GoldenGate Integrated Delivery? Oracle Integrated Delivery is only available for Oracle Databases. Q. With Integrated Delivery, where can we look for the performance stats? With 12c, performance statistics are collected in the AWR repository and the data is available via the normal AWR reports. Q. What are the steps required to add a new table to an existing replication setup? The steps to be executed would be the following Include the new table in the Extract & pump process. Obtain the starting database SCN and copy the source table data to the target database Start Replicat on target at the source database SCN point. Q. What is the purpose of the DEFGEN utility? When the source and the target schema objects are not the same (different DDLs) the Replicat process needs to know the source definition of the objects. The output from the DEFGEN utility is used in conjunction with the trail data to determine which column value in the trail belongs to which column. Q. We want to set up one-way data replication for my online transaction processing application. However, there are compressed tables in the environment. Please suggest how I can achieve it. 
You must use Oracle Golden Gate 11.2 and configure Golden Gate Integrated Capture process to extract data from compressed tables. Note: Pre OGG 11.2 doesn’t support extracting data from compressed tables Q. What are the different OGG Initial load methods available? OGG has 2 functionalities, one it is used for Online Data Replication and second for Initial Loading . If you are replicating data between 2 homogeneous databases then the best method is to use database specific method (Exp/Imp, RMAN, Transportable tablespaces, Physical Standby and so on). Database specific methods are usually faster than the other methods. —If you are replicating data between 2 heterogeneous databases or your replicat involves complex transformations, then the database specific method can’t be used. In those cases you can always use Oracle GoldenGate to perform initial load. Within Oracle GoldenGate you have 4 different ways to perform initial load. Direct Load – Faster but doesn’t support LOB data types (12c include support for LOB) Direct Bulk Load – Uses SQL*LOAD API for Oracle and SSIS for MS SQL SERVER File to replicat – Fast but the rmtfile limit is 2GB. If the table can’t be fit in 1 rmtfile you can use maxfiles but the replicat need to be registered on the target OGG home to read the rmtfiles from source. File to Database utility – depending on the target database, use SQL*LOAD for Oracle and SSIS for MS SQL SERVER and so on. Q. I have a table called ‘TEST’ on source and target with same name, structure and data type but in a different column order. How can you setup replication for this table? OGG by default assumes that the sources and target tables are identical. A table is said to be identical if and only if the table structure, data type and column order are the same on both the source and the target. If the tables are not identical you must use the parameter ‘SOURCEDEFS’ pointing to the source table definition and COLMAP parameter to map the columns from source to target. 
Q. What is the best practice to delete the extract files in OGG? Use the manager process to delete the extract files after they are consumed by the extract/replicat process PURGEOLDEXTRACTS /u01/app/oracle/dirdat/et*, USECHECKPOINTS, MINKEEPHOURS 2 Q. I have a one-way replication setup. The system administration team wants to apply an OS patch to both the OGG source host and the target servers. Provide the sequence of steps that you will carry out before and after applying this patch. Check to make sure that the Extract has processed all the records in the data source (Online Redo/archive logs) GGSCI> send extract , logend (The above command should print YES) Verify the extract, pump and replicat have zero lag. GGSCI> send extract , getlag GGSCI> send extract , getlag GGSCI> send replicat , getlag (The above command should print “At EOF, no more records to process.”) Stop all application and database activity. Make sure that the primary extract is reading the end of the redolog and that there is no LAG at all for the processes. Now proceed with stopping the processes on Source Stop the primary extract Stop the pump extract Stop the manager process Make sure all the processes are down On Target Stop the replicat process Stop mgr Make sure that all the processes are down. Proceed with the maintenance After the maintenance, proceed with starting up the processes on Source Start the manager process Start the primary extract Start the pump extract (Or simply all the extract processes as GGSCI> start extract *) Make sure that all the processes are up. On Target Start the manager process Start the replicat process Make sure that all the processes are up Q. What are the basic resources required to configure an Oracle GoldenGate high availability solution with Oracle Clusterware? There are 3 basic resources required Virtual IP Shared storage Action script Q. How can you determine if the parameters for a process were recently changed? 
Whenever a process is started, the parameters in the .prm file for the process are written to the process REPORT. You can look at the older process reports to view the parameters which were used to start up the process. By comparing the older and the current reports you can identify the changes in the parameters. Q. Is there a way to check the syntax of the commands in the parameter file without actually running the GoldenGate process? Yes, you can place the SHOWSYNTAX parameter in the parameter file and try starting. If there is any error you will see it. Q. What are macros? A macro is an easier way to build your parameter file. Once a macro is written it can be called from different parameter files. Common parameters like username/password and other parameters can be included in these macros. A macro can either be another parameter file or a library. Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • Last modified table

    Last modified table As a DBA, the application team might sometimes ask you to provide details of the last modified table in Oracle. The table modification can be an insert, update or delete. The below queries get details of the last or latest modified table in an Oracle database. Run the queries depending upon the database version. Last modified table (10g and above) set linesize 500; select TABLE_OWNER, TABLE_NAME, INSERTS, UPDATES, DELETES, to_char(TIMESTAMP,'YYYY-MON-DD HH24:MI:SS') from all_tab_modifications where table_owner<>'SYS' and EXTRACT(YEAR FROM TO_DATE(TIMESTAMP, 'DD-MON-RR')) > 2010 order by 6; Last modified table (9i) In 9i, table monitoring has to be enabled manually or else all_tab_modifications won’t keep a record of changes. 10g onwards, Oracle by default records the modifications Last modified table in Oracle for a 9i db col object for a20; col object_name for a20; SELECT OWNER, OBJECT_NAME, OBJECT_TYPE, to_char(LAST_DDL_TIME,'YYYY-MON-DD HH24:MI:SS') from dba_objects where LAST_DDL_TIME=(select max(LAST_DDL_TIME) from dba_objects WHERE object_type='TABLE'); Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

  • MySQL 8.0 on CentOS 7

    MySQL 8.0 on CentOS 7 This article demonstrates MySQL installation on CentOS Linux 7. Installation of MySQL Install wget to download the MySQL repository yum install wget Download MySQL # wget https://repo.mysql.com//mysql80-community-release-el7-5.noarch.rpm Adding the MySQL Yum Repository # yum install mysql80-community-release-el7-5.noarch.rpm Installing MySQL $ yum install mysql-community-server Start the MySQL service and enable it to auto-start on reboot # chkconfig mysqld on # service mysqld start Start the MySQL server with the following command # systemctl start mysqld You can check the status of the MySQL server with the following command: # systemctl status mysqld A superuser account 'root'@'localhost' is created. A password for the superuser is set and stored in the error log file. To reveal it, use the following command # grep 'temporary password' /var/log/mysqld.log Change the root password as soon as possible by logging in with the generated, temporary password and set a custom password for the superuser account: # mysql -uroot -p mysql>ALTER USER 'root'@'localhost' IDENTIFIED BY 'password'; Create MySQL Database Use the below command to create a new database mysql > create database mydb ; List all the databases mysql > show databases ; To switch to a database use the below command mysql > use mydb ; To exit the client use mysql > exit You can make new connections directly to the database as follows. $ mysql --user=root --database=mydb --password Enter password : Installation Done! Related Posts Heading 2 Add paragraph text. Click “Edit Text” to customize this theme across your site. You can update and reuse text themes.

Search Results

bottom of page