<?xml version="1.0" encoding="utf-8"?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://purl.org/rss/1.0/" xmlns:dc="http://purl.org/dc/elements/1.1/">
 <channel rdf:about="https://ineojobs.com/rss.xml">
  <description>Latest jobs / DBA / Data Warehousing</description>
  <link>https://ineojobs.com/</link>
  <title>IneoJobs.com</title>
  <dc:date>2026-04-12</dc:date>
  <items>
   <rdf:Seq>
    <rdf:li rdf:resource="https://ineojobs.com/job/819939/cbc-senior-consultant-at-cognizant/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/820144/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/820141/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/820041/support-engineer-iii-just-walk-out-tech-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/820055/support-engineer-iii-just-walk-out-tech-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819986/support-engineer-iii-just-walk-out-tech-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819827/ey-gds-consulting-ai-and-data-snowflake-architect-manager-at-ey/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/820093/python-developer-airlines-domain-at-globallogic/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819351/data-architect-aws-modernization-at-trianz/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819380/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819339/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819308/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819287/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819196/tanzu-data-intelligence-rabbitmq-professional-at-vmware/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819166/data-architect-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819175/data-architect-scala-at-happiest-minds/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819019/senior-software-engineer-python-gen-ai-at-wells-fargo/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819195/data-modeler-architect-vp-at-barclays/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818796/database-pl-sql-lead-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819541/senior-software-engineer-at-ltimindtree/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819525/senior-software-engineer-i-sql-developer-at-optum/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818460/hiring-grafana-developer-at-2coms/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818764/databricks-python-senior-engineer-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817961/murex-datamart-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819203/support-engineer-iii-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819807/data-warehouse-testing-lead-at-infosys/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819542/senior-integration-developer-oracle-integration-cloud-oracle-epm-at-fujitsu/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819181/support-engineer-iii-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818013/duckcreek-policy-developer-at-coforge/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817792/staff-database-reliability-engineer-in-oracle-cloud-at-rackspace-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817761/oracle-ebs-bi-apps-obia-qe-testing-functional-technical-at-rackspace-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819374/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819238/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819216/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819299/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818249/sap-hana-professional-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819242/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819317/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819373/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817912/tanzu-data-intelligence-mts3-rabbitmq-professional-at-vmware/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817719/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817914/oracle-dbaexadata-professional-at-capgemini/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817952/l1-production-support-engineer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817702/data-architect-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817741/data-platform-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818114/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818348/tanzu-data-intelligence-rabbitmq-professional-at-vmware/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818001/oracle-iam-professional-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818304/oracle-dba-at-quinnox/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819206/erwin-data-modeler-architect-vois-vodafone-hiring-at-vois/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817522/software-developer-sql-power-bi-at-siemens/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817710/aws-data-architect-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817739/data-architect-aws-at-leading-client/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819500/lead-software-engineer-at-leading-client/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817712/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817740/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819089/power-bi-developer-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819168/data-architect-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817563/power-bi-developer-at-leading-client/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817717/data-architect-aws-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817604/database-developer-at-idexcel/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816896/data-architect-finance-at-kone/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816880/assoc-architect-it-data-architecture-at-baxter/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816895/manager-data-operations-engineering-at-pfizer/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816946/data-base-engineer-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816908/eds-specialist-sppids3d-admin-at-worley/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816938/database-administrator-lead-at-idexcel/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816862/lead-azure-data-engineer-at-hdfc-bank/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816999/sr-analyst-iii-erp-package-applications-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816237/power-bi-developer-mid-level-at-infosys/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815870/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815842/application-developer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815918/application-support-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816294/etl-developer-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815784/app-dev-support-engineer-iii-at-conduent/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816967/gw-developer-_datahub-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816884/vp-chief-ai-architect-at-pfizer/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817014/sr-analyst-i-software-engineering-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815516/data-architect-at-barclays/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815818/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816090/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816263/sql-dba-expert-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815580/manager-digital-and-technology-finance-analytics-delivery-at-pfizer/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816250/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816042/application-developer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816049/architect-at-cognizant/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815537/duck-creek-claims-developer-at-coforge/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815806/application-developer-oracle-cloud-middleware-at-ibm/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815836/application-developer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816094/etl-tester-lead-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814955/data-architect-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815062/senior-etl-talend-developer-at-datamatics/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819504/lead-software-engineer-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815053/sr-script-writer-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814931/bi-developer-power-bi-at-exl/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815036/snowflake-developer-architect-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815514/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815531/data-architect-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/819171/app-dev-support-engineer-ii-at-conduent/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815524/data-architect-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815525/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/817764/process-mining-platform-engineer-at-pepsico/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815970/systems-and-infrastructure-engineer-iii-at-walmart/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815552/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815548/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815543/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814516/mainframe-db2-dba-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814728/oracle-cerner-ehr-systems-engineerambulatoryambulatory-scheduling-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814679/etl-developer-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814648/data-architect-at-addrec-solutions/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814448/data-architect-data-at-happiest-minds/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814692/guidewire-datahub-infocenter-developer-dhic-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814562/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814527/aws-data-architect-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814702/cloud-native-app-developer-standard-at-infogain/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814884/data-architect-data-at-happiest-minds/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814459/data-architect-at-wipro/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814069/data-architect-at-altimetrik/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814098/manager-data-operations-engineering-at-pfizer/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814395/ux-designer-senior-at-infogain/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814427/developer-etl-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816789/datawarehouse-architect-at-icici-bank/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814319/database-architect-at-icertis/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814540/dea-core-on-premise-reporting-professional-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816749/lead-application-development-engineering-associate-at-ncr-corporation/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814203/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814062/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814202/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814199/sql-dba-sql-server-dba-performance-tuning-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814196/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814065/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814168/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814071/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814103/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814147/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814046/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814129/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814118/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814139/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814663/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814671/oracle-epm-planning-usi-lead-developer-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814417/regular-xceptor-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814190/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814670/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814431/cloud-native-app-developer-standard-at-infogain/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814091/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814547/nas-impl-ice-business-analyst-at-adp/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814321/databricks-developer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814526/senior-technical-support-engineer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816784/aws-cloud-support-senior-engineer-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818985/sr-machine-learning-engineer-search-ai-at-apple/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814424/sr-power-bi-developer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814983/data-architect-at-syngenta-india/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813539/data-architect-at-ntt-data/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813910/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813586/data-engineer-a%c2%80%c2%93-microsoft-sql-server-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813755/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815765/manager-database-developer-clinical-database-management-at-pfizer/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813682/oracle-empirica-implementation-consultant-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813737/jira-admin-support-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814339/data-architect-at-leading-client/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814899/data-modeler-architect-at-barclays/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814029/lead-data-engineer-avp-at-hdfc-bank/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/814935/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/818840/digital-manufacturing-it-application-analyst-senior-at-cummins/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816526/application-developer-oracle-cloud-integration-at-ibm/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813924/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813559/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813608/dsp-data-stewardship-platform-professional-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813613/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813662/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813680/sample-manager-lims-developer-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813700/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813710/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813782/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813792/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813805/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813832/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813846/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813881/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812451/senior-software-engineer-at-walmart/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813096/denodo-developer-at-itc-infotech/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813926/dbt-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/815553/database-administrator-at-sunquest-information/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812163/mainframe-db2-dba-admin-not-developer-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812762/test-lead-at-bahwan-cybertek/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812916/mainframe-z-os-jcl-admin-only-at-tata-consultancy/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812948/software-engineer-python-developer-at-bahwan-cybertek/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813856/qa-professional-azure-and-databricks-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813170/software-engineer-python-developer-at-bahwan-cybertek/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813091/net-angular-professional-at-capgemini/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/816655/application-support-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811402/power-bi-developer-at-crisil/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811349/gen-ai-architect-at-capgemini/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811516/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811529/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811514/senior-mainframe-ims-db2-database-administrator-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811599/cloud-iaas-architecture-design-oracle-cloud-expert-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811883/etl-aws-glue-senior-engineer-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813130/warehouse-logistics-assistant-male-day-shift-sal-upto-3-lpa-at-trigent/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811608/scrum-master-at-people-tech/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811451/senior-mongodb-administrator-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811368/assoc-architect-it-data-architecture-at-baxter/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811363/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811355/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811331/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811329/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811557/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811474/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811556/senior-snowflake-azure-sme-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811540/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811577/power-bi-developer-at-crisil/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811993/principal-data-scientist-ai-application-development-expert-at-sap/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811525/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811518/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811825/analyst-at-eclerx/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810548/qa-with-azure-and-databricks-professional-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810396/senior-software-engineer-power-bi-developer-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812804/azure-data-engineer-at-hdfc-bank/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810498/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810405/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810483/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810338/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810387/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810326/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810380/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810929/support-engineer-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812888/senior-software-engineer-etl-developer-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811298/support-engineer-lmaq-de-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810507/bi-developer-power-bi-sql-at-apexon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811419/servicenow-csm-professional-at-dxc-technology/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812013/advisor-systems-security-analysis-ibm-datapower-at-fis/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810037/mim-professional-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809972/aws-sre-devops-professional-at-zensar/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809874/senior-software-engineer-at-allianz/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812347/reconciliation-system-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/813121/uft-consultant-at-opentext/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809763/senior-software-engineer-_power-bi-developer-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809783/ibm-isam-specialist-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811945/python-developer-for-data-engineering-at-msci-services/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809817/ey-gds-consulting-ai-and-data-data-architect-hadoop-manager-at-ey/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809848/data-architect-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809807/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809871/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809839/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809814/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809869/eim-data-architect-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/812281/datawarehouse-architect-at-icici-bank/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809830/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810206/python-django-lead-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810282/senior-software-analyst-at-softtek/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809820/process-mining-platform-engineer-at-pepsico/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810144/oracle-fusion-erp-security-analyst-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810057/oracle-fusion-oci-engineer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/811616/oracle-fusion-oci-engineer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810322/oracle-fusion-erp-a%c2%80%c2%93-security-analyst-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809842/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809809/gcp-data-architect-at-capco/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810210/database-administrator-at-sunquest-information/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/810813/architect-atc-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809825/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808296/support-engineer-iii-just-walk-out-tech-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808725/senior-software-engineer-at-walmart/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808808/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808970/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809022/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808863/sr-software-engineer-at-encora/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809469/senior-technical-support-engineer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809540/sr-power-bi-developer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808798/senior-software-engineer-informatica-developer-at-cgi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808976/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809109/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809543/sap-developer-lead-at-infogain/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809029/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808776/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808037/operations-engineer-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807860/senior-database-administrator-at-sunquest-information/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807791/ibm-datastage-developer-at-capgemini/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809017/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808979/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809028/data-architect-at-accenture-hr-aditi/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809494/database-administrator-at-sunquest-information/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807806/semantic-ai-ml-architect-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807868/senior-database-administrator-at-sunquest-information/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808473/salesforce-developer-and-admin-iss-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808284/skilled-it-warehouse-logistics-vendor-specialist-at-trigent/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808910/senior-software-engineer-i-at-optum/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808958/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807802/ab-initio-developer-at-capgemini/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807942/support-engineer-iii-just-walk-out-tech-at-amazon/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807947/senior-software-engineer-at-walmart/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808815/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808816/test-lead-data-at-hexaware-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809039/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808817/custom-software-engineer-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809045/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/809041/data-architect-at-accenture/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807372/sr-engineer-software-at-empower/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/808949/data-architect-at-virtusa/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807477/senior-software-engineer-c-developer-at-nice/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/805682/technical-specialist-app-engg-services-at-birlasoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806778/servicenow-sam-ham-itam-developer-with-cmdb-skills-at-kyndryl/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806646/senior-hogan-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806560/databricks-developer-at-infobeans/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/805849/azure-data-architect-subcon-at-birlasoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806085/postgress-subcontractor-at-birlasoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806574/senior-mainframe-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806488/hiring-senior-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806519/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806597/senior-software-engineer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806609/etl-qa-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806632/ms-azure-with-adf-databricks-expert-at-cirruslabs/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/807055/senior-data-engineer-python-developer-at-luxoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/804708/gcc-network-design-sr-modeler-at-pepsico/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/804505/erp-cloud-techno-fucntional-professional-at-ncr-corporation/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/804430/etl-tester-senior-engineer-at-iris-software/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/805692/technical-lead-oracle-cx-consultant-at-birlasoft/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806392/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806397/hiring-oracle-dba-sun-technology-inc-bangalore-at-sun-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/806531/hiring-oracle-administrator-sun-technology-inc-bangalore-at-sun-technologies/"/>
    <rdf:li rdf:resource="https://ineojobs.com/job/804874/postgresql-developer-at-centilytics/"/>
   </rdf:Seq>
  </items>
 </channel>
 <item rdf:about="https://ineojobs.com/job/819939/cbc-senior-consultant-at-cognizant/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &amp;nbsp;&lt;/div&gt;&lt;p&gt;&lt;br&gt;&amp;nbsp;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Job summary&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Looking for a Senior Developer with 5 to 10 years of experience developing reporting solution with Power BI Data Models Azure Data Factory Azure SQL Synapse Databricks. Data Management &amp;amp; Analytics&lt;/p&gt;&lt;p&gt;&lt;br&gt;&amp;nbsp;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Should have strong experience designing end to end business intelligence solution with Microsoft BI Stack&lt;/li&gt; &lt;li&gt;Should have strong exposure to the Azure services - ADF SQL Server Synapse and Databricks&lt;/li&gt; &lt;li&gt;Should have good experience writing DAX queries and support performance optimization techniques&lt;/li&gt; &lt;li&gt;Should have strong SQL and Python coding skills&lt;/li&gt; &lt;li&gt;Should have good experience working on Microsoft Fabric solution using Data Factory Spark Jobs and Data Warehouse&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cognizant&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819939/cbc-senior-consultant-at-cognizant/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819939/cbc-senior-consultant-at-cognizant/</link>
  <title>[Full Time] CBC Senior Consultant at Cognizant</title>
   <dc:date>2026-04-11T20:03:19+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/820144/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;Python (Programming Language), Data Engineering, Microsoft Power Business Intelligence (BI)&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. A typical day involves collaborating with various teams to model and design the application data structure, ensuring optimal storage and integration solutions are in place. You will engage in discussions to align data strategies with business objectives, while also addressing any challenges that arise in the data architecture process. Your role will require a keen understanding of data flows and the ability to translate complex requirements into actionable designs, fostering a collaborative environment that encourages innovation and efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Data Engineering, Python (Programming Language), Microsoft Power Business Intelligence (BI).&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/820144/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/820144/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
   <dc:date>2026-04-10T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/820141/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;B.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;C.Worked in implementation of Databricks Gen AI/Agentic AI use case &lt;br&gt;D.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;E.Candidate should have worked in Data governance Solution &lt;br&gt;F.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;G.Lead Big data architecture and design medium-big Cloud based, Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;H.Breadth of experience in various client scenarios and situations &lt;br&gt;I.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;J.Thought leadership and innovation &lt;br&gt;K.Lead creation of new data assets offerings &lt;br&gt;L.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;A.Experience working in Medallion architecture involving Delta lake house principles &lt;br&gt;B.Expert level in Designing and Architect solutions in Azure Databricks, Azure Data lake, Delta Lake implementation. &lt;br&gt;C.Experience in Databricks GenAI Implementation &lt;br&gt;D.Experience in Azure purview/Profisee/Unity Catalog &lt;br&gt;E.Well versed in Real time and batch streaming concepts and experience in its implementation &lt;br&gt;F. Expert level experience in Azure cloud technologies like PySpark, Databricks, Python, Scala and SQL. &lt;br&gt;G.Exp in one or more Real-time/batch ingestion including:Azure Delta live tables , Autoloader &lt;br&gt;H.Exp in handling medium to large Big Data implementations &lt;br&gt;I. Strong understanding of data strategy. 
Data Quality and Delta lake components &lt;br&gt;J.For Level 8 - Candidate must have 10-12 years of IT experience and around 5 years of extensive Big data experience (design + build) in Databricks &lt;br&gt;K.For Level 9 - Candidate must have 7-10 years of IT experience and around 5 years of Big data experience (design + build) in Databricks &lt;br&gt;L.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;A.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;B.Should have excellent client communication skills &lt;br&gt;C.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;A.Must have:BE/BTech/MCA &lt;br&gt;B.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/820141/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/820141/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
   <dc:date>2026-03-31T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/820041/support-engineer-iii-just-walk-out-tech-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;As a Support Engineer in JWO Team, you seek resolution to problems and mitigate risk, always ensuring a Customer Obsessed experience has occurred. You will be working on services with a direct impact on the customer experience. If you are excited about the opportunity to learn and work on distributed systems, enjoy trouble shooting and solving complex problems, consider the opportunities to work with Amazon Physical Stores. You will help solve a variety of challenges and offer your expertise in growing the knowledge of your peers via team collaboration. You will be counted on to identify areas of improvement and drive projects to implement them. We consistently whiteboard so be comfortable writing and supporting your ideas on the team board. You will play an active role in defining the support processes for technologies in partnership with other technology leaders within and possibly outside the team. You should be comfortable with a level of ambiguity that s higher than most projects and relish the idea of solving big challenges. You will also mentor other engineers in your area of expertise. Along the way, we guarantee that you ll work hard, have fun and impact many customers! This role requires the flexibility to work 5 days a week (occasionally on weekends) on a rotational basis. AWS Support is 24x7x365 operations and work timings for this role is in India night time i.e. 10 PM to 6 AM IST or 1 PM to 10 PM IST. You are expected to work in night shifts hours based on business requirements. 
- 2+ years of software development, or 2+ years of technical support experience &lt;br&gt; - Experience scripting in modern program languages &lt;br&gt; - Experience troubleshooting and debugging technical systems - Knowledge of web services, distributed systems, and web application development &lt;br&gt; - Experience troubleshooting &amp;amp; maintaining hardware &amp;amp; software RAID &lt;br&gt; - Experience with REST web services, XML, JSON &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/820041/support-engineer-iii-just-walk-out-tech-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/820041/support-engineer-iii-just-walk-out-tech-at-amazon/</link>
  <title>[Full Time] Support Engineer III, Just Walk Out Tech at Amazon</title>
   <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/820055/support-engineer-iii-just-walk-out-tech-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;As a Support Engineer III in the Technical Operations Center (TOC), you are a technical leader and force multiplier for the team. You go beyond resolving incidents you own the patterns behind them, drive the initiatives that eliminate them, and raise the bar for how the team operates. You will lead oncall response for complex, high-severity incidents, own deep dive investigations end-to-end, and partner with Dev teams to drive automation and lasting fixes. On non-oncall shifts, you lead reduction initiatives, mentor peers, and shape the processes that make the team stronger. Key job responsibilities &lt;br&gt; - Lead oncall response for JWO store incidents as the primary responder, owning triage, escalation, and resolution for high-severity, complex incidents within SLA windows &lt;br&gt; - Own deep dive investigations end-to-end identifying systemic failure patterns, driving root cause analyses, and partnering with Dev teams to validate that fixes hold &lt;br&gt; - Manage rotational shift coverage as part of a globally distributed follow-the-sun model, including weekend rotations, ensuring seamless handoffs and shift health documentation &lt;br&gt; - Drive zero-touch resolution and automation initiatives identifying manual intervention patterns, proposing runbook candidates, and seeing them through to implementation &lt;br&gt; - Partner with Dev and engineering teams during escalations, providing structured reproduction steps, impact assessments, and technical recommendations &lt;br&gt; - Own SOP development and knowledge base quality identifying gaps, authoring updates, and ensuring the teams operational documentation reflects current best practices &lt;br&gt; - Mentor SE1s and SE2s through oncall shadowing, deep dive reviews, and day-to-day coaching &lt;br&gt; - Lead team-level operational reviews, presenting data-backed findings and driving follow-through on action items About the team &lt;br&gt; - Diverse 
Experiences Amazon values diverse backgrounds. Whether your career is just starting or has followed a non-traditional path, we encourage you to apply. &lt;br&gt; - Why AWS AWS is the worlds most comprehensive cloud platform, trusted by startups and Global 500 companies alike to power their businesses. &lt;br&gt; - Work/Life Balance We value work-life harmony and strive for flexibility, so our people can thrive both at work and at home. &lt;br&gt; - Inclusive Team Culture Employee-led affinity groups and inclusion events foster collaboration and empower our people to bring bold, fresh perspectives. &lt;br&gt; - Mentorship and Career Growth Endless knowledge-sharing, mentorship, and career-advancing resources to help you grow into a better-rounded professional. - 2+ years of software development, or 2+ years of technical support experience &lt;br&gt; - Experience scripting in modern program languages &lt;br&gt; - Experience troubleshooting and debugging technical systems - Knowledge of web services, distributed systems, and web application development &lt;br&gt; - Experience with REST web services, XML, JSON&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/820055/support-engineer-iii-just-walk-out-tech-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/820055/support-engineer-iii-just-walk-out-tech-at-amazon/</link>
  <title>[Full Time] Support Engineer III, Just Walk Out Tech at Amazon</title>
   <dc:date>2026-03-27T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819986/support-engineer-iii-just-walk-out-tech-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;As a Support Engineer III in the Technical Operations Center (TOC), you are a technical leader and force multiplier for the team. You go beyond resolving incidents you own the patterns behind them, drive the initiatives that eliminate them, and raise the bar for how the team operates. You will lead oncall response for complex, high-severity incidents, own deep dive investigations end-to-end, and partner with Dev teams to drive automation and lasting fixes. On non-oncall shifts, you lead reduction initiatives, mentor peers, and shape the processes that make the team stronger. &lt;strong&gt; Key job responsibilities &lt;/strong&gt; &lt;br&gt; - Lead oncall response for JWO store incidents as the primary responder, owning triage, escalation, and resolution for high-severity, complex incidents within SLA windows &lt;br&gt; - Own deep dive investigations end-to-end identifying systemic failure patterns, driving root cause analyses, and partnering with Dev teams to validate that fixes hold &lt;br&gt; - Manage rotational shift coverage as part of a globally distributed follow-the-sun model, including weekend rotations, ensuring seamless handoffs and shift health documentation &lt;br&gt; - Drive zero-touch resolution and automation initiatives identifying manual intervention patterns, proposing runbook candidates, and seeing them through to implementation &lt;br&gt; - Partner with Dev and engineering teams during escalations, providing structured reproduction steps, impact assessments, and technical recommendations &lt;br&gt; - Own SOP development and knowledge base quality identifying gaps, authoring updates, and ensuring the teams operational documentation reflects current best practices &lt;br&gt; - Mentor SE1s and SE2s through oncall shadowing, deep dive reviews, and day-to-day coaching &lt;br&gt; - Lead team-level operational reviews, presenting data-backed findings and driving follow-through on action items - Why AWS 
AWS is the worlds most comprehensive cloud platform, trusted by startups and Global 500 companies alike to power their businesses. &lt;br&gt; - Work/Life Balance We value work-life harmony and strive for flexibility, so our people can thrive both at work and at home. &lt;br&gt; - Inclusive Team Culture Employee-led affinity groups and inclusion events foster collaboration and empower our people to bring bold, fresh perspectives. &lt;br&gt; - Mentorship and Career Growth Endless knowledge-sharing, mentorship, and career-advancing resources to help you grow into a better-rounded professional. - 2+ years of software development, or 2+ years of technical support experience &lt;br&gt; - Experience scripting in modern program languages &lt;br&gt; - Experience troubleshooting and debugging technical systems - Knowledge of web services, distributed systems, and web application development &lt;br&gt; - Experience with REST web services, XML, JSON &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819986/support-engineer-iii-just-walk-out-tech-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819986/support-engineer-iii-just-walk-out-tech-at-amazon/</link>
  <title>[Full Time] Support Engineer III, Just Walk Out Tech at Amazon</title>
   <dc:date>2026-03-27T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819827/ey-gds-consulting-ai-and-data-snowflake-architect-manager-at-ey/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &amp;nbsp; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; The opportunity &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; We re looking for candidates with strong technology and data understanding in the big data engineering space, with proven delivery capability. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Your key responsibilities &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Design scalable data architectures leveraging Snowflake capabilities &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop and implement optimized data models (star and snowflake schemas) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop and deploy big data pipelines using Snowflake Cloud DW &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Design, develop, and migrate ETL routines from on prem to cloud &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Engage with senior leaders to understand business goals &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Optimize model code for performance &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Skills and attributes for success &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Hands-on experience in data warehousing and ETL &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong Snowflake development and modeling experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with roles, schemas, databases, and integrations (DBT, ETL tools) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Knowledge of performance tuning and resource monitors &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Programming experience with Java, Scala, or Python &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with batch and 
real-time/stream analytics &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong understanding of distributed computing patterns &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; To qualify for the role, you must have &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Computer science graduate or equivalent &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; 12 14+ years of industry experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Agile delivery experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong communication skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Deep technical expertise in Snowflake &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience deploying Snowflake using best practices &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to work hands-on with customers &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Ideally, you ll also have &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Client management skills &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; What we look for &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Minimum 5 years as Analytics Architect &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Around 2 years of Snowflake experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong technical curiosity and learning mindset &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Meaningful and challenging projects &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Continuous learning and coaching &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Flexible working environment &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; 
&lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Accounting / Auditing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;EY&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819827/ey-gds-consulting-ai-and-data-snowflake-architect-manager-at-ey/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819827/ey-gds-consulting-ai-and-data-snowflake-architect-manager-at-ey/</link>
  <title>[Full Time] Ey - Gds Consulting - Ai And Data - Snowflake Architect-manager at EY</title>
   <dc:date>2026-04-11T12:52:54+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/820093/python-developer-airlines-domain-at-globallogic/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Description:&lt;/b&gt; &lt;br&gt;&lt;p&gt;Skill Set: Python Developer with experience in Linux/Unix&lt;br&gt;Mandatory: Must have Aviation/Airline experience with Crew Management systems/projects (crew scheduling, rostering, tracking, recovery, or training management)&lt;br&gt;Work Location: Bangalore&lt;br&gt;Experience: 8 to 30 years&lt;/p&gt;&lt;br&gt;&lt;b&gt;Requirements:&lt;/b&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Product configuration and extension - &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Configure and extend existing functionality, including business logic, data models, reporting, and user interface adaptations.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Integration design and implementation - &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Design, build, and evolve integration points with internal systems as well as customer and third-party products.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;DevOps and delivery enablement - &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Contribute to CI/CD pipelines, automated testing, and release delivery to support reliable and repeatable deployments.&lt;/p&gt;&lt;p&gt;&lt;strong&gt; Client collaboration and requirements capture - &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Work closely with client stakeholders to understand operational needs and business objectives, translating these into implementable requirements.&lt;/p&gt;&lt;p&gt;&lt;strong&gt; Quality-driven development - &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Apply strong development practices, including automated test development and continuous validation, to ensure solution quality and maintainability.&lt;/p&gt;&lt;br&gt;&lt;b&gt;Job Responsibilities:&lt;/b&gt; &lt;br&gt;&lt;p&gt;&lt;strong&gt;Technical skills&lt;/strong&gt;&lt;/p&gt;&lt;p&gt; Rave (Local functional development language)&lt;/p&gt;&lt;p&gt; Python (with software development and programming)&lt;/p&gt;&lt;p&gt; Gherkin &amp;amp; Behave (for elaborated tests structure)&lt;/p&gt;&lt;p&gt; XML (for data model 
definitions, layout configuration, misc. configuration and data integration)&lt;/p&gt;&lt;p&gt; Json (for advanced data integration)&lt;/p&gt;&lt;p&gt; CSV (for simple data integration)&lt;/p&gt;&lt;p&gt; Bash Scripting (for general automation scripts)&lt;/p&gt;&lt;p&gt; SQL (queries to the database)&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Preferred qualifications&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&amp;nbsp; 3+ years experience with software development and programming.&lt;/p&gt;&lt;p&gt; 2+ years experience with Python.&lt;/p&gt;&lt;p&gt; 2+ years experience effectively working in a Linux/Unix environment&lt;/p&gt;&lt;p&gt; Airline and specifically Crew and/or Operations planning experience&lt;/p&gt;&lt;p&gt; Understanding of AWS / Kubernetes&lt;/p&gt;&lt;br&gt;&lt;b&gt;What We Offer: &lt;/b&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Exciting Projects:&lt;/strong&gt; We focus on industries like High-Tech, communication, media, healthcare, retail and telecom. Our customer list is full of fantastic global brands and leaders who love what we build for them.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Collaborative Environment:&lt;/strong&gt; You Can expand your skills by collaborating with a diverse team of highly talented people in an open, laidback environment  or even abroad in one of our global centers or client facilities!&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Work-Life Balance:&lt;/strong&gt; GlobalLogic prioritizes work-life balance, which is why we offer flexible work schedules, opportunities to work from home, and paid time off and holidays.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Professional Development:&lt;/strong&gt; Our dedicated Learning &amp;amp; Development team regularly organizes Communication skills training(GL Vantage, Toast Master),Stress Management program, professional certifications, and technical and soft skill trainings.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Excellent Benefits:&lt;/strong&gt; We provide our employees with competitive salaries, family medical insurance, Group Term Life 
Insurance, Group Personal Accident Insurance , NPS(National Pension Scheme ), Periodic health awareness program, extended maternity leave, annual performance bonuses, and referral bonuses.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Fun Perks:&lt;/strong&gt; We want you to love where you work, which is why we host sports events, cultural activities, offer food on subsidies rates, Corporate parties. Our vibrant offices also include dedicated GL Zones, rooftop decks and GL Club where you can drink coffee or tea with your colleagues over a game of table and offer discounts for popular stores and restaurants!&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Globallogic&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/820093/python-developer-airlines-domain-at-globallogic/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/820093/python-developer-airlines-domain-at-globallogic/</link>
  <title>[Full Time] Python Developer - Airlines Domain at Globallogic</title>
  <dc:date>Mon, 06 Apr 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819351/data-architect-aws-modernization-at-trianz/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;u&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Company Overview &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/u&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Trianz is an &lt;b&gt;applied AI solutions company&lt;/b&gt; that accelerates customer business transformation through AI powered &lt;b&gt;&quot;Transformation Services as a Software Model&quot;&lt;/b&gt;. With 25+ years of transforming enterprises, weve evolved to a product-led, platform-driven organization serving global enterprises across Financial Services, Insurance, Healthcare, Hi-Tech, Manufacturing, and other industries. &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;With global presence across 4 continents, our platform portfolio under the unified &lt;b&gt;Concierto brand&lt;/b&gt; delivers end-to-end transformations including solutions for &lt;b&gt;Migrate, Manage, Maximize, Modernize, Insights &amp;amp; Agentic AI, and SecOps&lt;/b&gt; - delivered through strategic partnerships with leading hyperscalers. 
&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Were building the premier innovation-led organization in the digital transformation space through AI-first methodologies and data-driven excellence - &lt;b&gt;RevolutionAIzing Transformations.&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Role Summary&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;We are seeking an experienced &lt;b&gt;Data Architect&lt;/b&gt; to lead the enterprise-wide modernization of our data infrastructure onto AWS. The role is strategic and hands-on, responsible for designing a multi-zone data architecture that enables seamless integration, analytics, and reporting while reducing operational overhead and vendor dependencies. 
The architect will serve as a bridge between business stakeholders, data engineers, and analysts to ensure a cloud-first, scalable, and governed data platform.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Key Responsibilities:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Lead the design and implementation of a multi-zone AWS data architecture (Raw/Landing, Integration, Analytical, Reporting zones).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Define and enforce enterprise data standards, data modeling conventions, and governance practices.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Collaborate with business units to translate analytics and reporting requirements into scalable data solutions.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Assess on-premises systems (Oracle, Redshift, PostgreSQL, Access) and plan their migration to AWS, optimizing for performance, cost, and reliability.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Implement best practices for data security, access control, and 
compliance.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Oversee data quality frameworks, ensuring consistency, accuracy, and traceability of metrics.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Evaluate and integrate AWS-native and third-party tools to reduce operational complexity and vendor dependency.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Mentor and guide data engineers and analysts on architecture best practices and data governance.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Continuously monitor emerging AWS technologies and data trends to enhance the platforms capabilities.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Roles &amp;amp; Responsibilities:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Conduct a comprehensive assessment of current on-premises and cloud data systems (Oracle, Redshift, PostgreSQL, Access).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Identify data silos, tool fragmentation, inconsistencies, and operational challenges.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Analyze existing data flows, reporting mechanisms, and metrics definitions.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Engage with business stakeholders and technical teams to understand requirements, pain points, and strategic goals.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Propose a multi-zone AWS data architecture (Raw/Landing, Integration, Analytical, Reporting zones) aligned with business needs.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Recommend AWS services and tools for ingestion, transformation, storage, analytics, and reporting.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Highlight data governance, security, and compliance requirements for the proposed architecture.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Provide a roadmap and transition plan for moving from current systems to 
AWS-based architecture.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Prepare documentation and presentations for executive and stakeholder review.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Required Skills &amp;amp; Experience:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;12+ years in data architecture or related roles with enterprise-scale projects.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Hands-on experience with AWS data services (S3, Glue, Redshift, Athena, Lake Formation, EMR).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Strong knowledge of ETL/ELT processes, SQL, and data modeling techniques.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Proven experience in cloud migration and multi-database environments.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Strong understanding of data governance, security, and 
compliance frameworks.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Excellent communication and stakeholder management skills.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Certifications&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;AWS Certified Solutions Architect Professional&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; (preferred)&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;AWS Certified Data Analytics Specialty&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; (highly desirable)&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Any other cloud/data certifications (e.g., Snowflake, GCP/Azure knowledge a plus)&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Preferred / Nice-to-Have&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; 
&lt;span&gt; &lt;span&gt;Experience with BI tools (Power BI, Tableau, Looker) and self-service analytics.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Exposure to DevOps practices for data pipelines.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Familiarity with big data frameworks (Spark, Hadoop) and AI/ML integration.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Previous experience in cross-functional, global teams.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Impact:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; This role directly influences the organization ability to make data-driven decisions, reduces siloed operations, and ensures a scalable, modern data platform.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Location&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;-Hyderabad, Bangalore&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Timing :&lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;-1:00 PM to 10:00 PM IST&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;u&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Why choose Trianz&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/u&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Personal Growth:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Startup agility with enterprise impact. Experience rapid innovation cycles while working on Fortune 500 transformations.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;AI-First Future:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Where humans and AI revolutionize business. Lead the charge in implementing AI-driven transformation at scale.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Global Impact:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Shape transformation across continents. 
Work with diverse teams and clients spanning Americas, Europe, Asia, and beyond.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Executive Access:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Direct impact on Fortune 500 strategies. Work alongside C-suite leaders and influence major business decisions.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Ownership &amp;amp; Entrepreneurial Spirit:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Your ideas, our platform, global impact. Zero bureaucracy culture with decision-making autonomy and rapid execution. &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Equal Employment Opportunity&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Trianz is an Equal Opportunity Employer and does not discriminate on the basis of race, color, creed, national or ethnic origin, gender, religion, disability, age, political affiliation or belief, disabled veteran, veterans (except in those special circumstances permitted or mandated by law).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - 
Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Trianz&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819351/data-architect-aws-modernization-at-trianz/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819351/data-architect-aws-modernization-at-trianz/</link>
  <title>[Full Time] Data Architect -AWS Modernization at Trianz</title>
  <dc:date>Fri, 03 Apr 2026 15:17:28 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819380/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt; About The Role &lt;/b&gt; &lt;br&gt; &lt;b&gt;Project Role &lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt;Project Role Description &lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt; &lt;b&gt;Must have skills &lt;/b&gt;SAP BusinessObjects Business Intelligence&lt;br&gt; &lt;b&gt;Good to have skills &lt;/b&gt;NA&lt;br&gt;Minimum 5 year(s) of experience is required&lt;br&gt; &lt;b&gt;Educational Qualification &lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt; &lt;br&gt;As a Custom Software Engineer, you will engage in the development of custom software solutions that are designed to meet specific business needs. Your typical day will involve collaborating with various teams to design, code, and enhance components across systems or applications. 
You will utilize modern frameworks and agile practices to ensure that the solutions you deliver are scalable and high-performing, contributing to the overall success of the projects you are involved in.BO Skills&lt;br&gt;Crystal ReportsDesigning highly formatted, presentation-quality reports with complex sub reports, formulas, and drill-down functionality &lt;br&gt;Web Intelligence (Webi)Expertise in creating ad hoc, interactive, and dashboard-style reports using multiple data providers, merge dimensions, variables, and prompts.&lt;br&gt;Information Design Tool (IDT)Experience in building and maintaining universes, including joins, cardinalities, hierarchies, and performance optimization.&lt;br&gt;Data Warehousing ConceptsSolid understanding of ETL processes, dimensional modeling, and star/snowflake schemas.&lt;br&gt;BO AdministrationFamiliarity with promotion management, report scheduling, and implementing security.&lt;br&gt;Database IntegrationExperience connecting to platforms like Oracle, DB2 and SQL Server. 
Cloud DB is a plus.&lt;br&gt;ETL and Scheduling ToolsKnowledge of tools such as Informatica, SAP Data Services, AutoSys, and StoneBranch is a plus.&lt;br&gt;Governance and SecurityApplying proper version control, row/object/report-level security, and managing promotions effectively.SQL Skills&lt;br&gt;Query Writing &amp;amp; OptimizationWriting, optimizing, and troubleshooting SQL queries for data extraction, transformation, and performance tuning.&lt;br&gt;Complex JoinsProficiency in inner, outer, self-joins, and lateral joins.&lt;br&gt;SubqueriesAbility to write scalar, correlated, and nested subqueries for dynamic data retrieval and filtering.&lt;br&gt;Common Table Expressions (CTEs)Including recursive CTEs for organizing complex logic and analyzing hierarchical data.&lt;br&gt;Aggregation &amp;amp; GroupingUsing functions like SUM, AVG, COUNT, MAX, MIN with GROUP BY and HAVING to produce &lt;b&gt;Summary&lt;/b&gt; statistics.&lt;br&gt;Advanced CASE StatementsImplementing conditional logic and business rules within queries.&lt;br&gt;Data Transformation &amp;amp; PivotingReshaping data for crosstab or matrix-style reports.&lt;br&gt;Performance TuningAnalyzing execution plans, managing indexes, and optimizing joins and subqueries.&lt;br&gt;Advanced Filtering &amp;amp; ManipulationUsing WHERE, HAVING, string/date functions, and cleansing techniques to meet reporting requirements.In addition to technical proficiency, we seek individuals who demonstrate the following competencies to ensure they can deliver high-quality, business-aligned reporting solutions:Analytical Skills&lt;br&gt;Requirement AnalysisAbility to understand and interpret business requirements and translate them into effective reporting solutions.&lt;br&gt;Problem SolvingStrong troubleshooting skills for resolving performance issues and ensuring data integrity across reports.&lt;br&gt;Attention to DetailPrecision in report design, data mapping, and universe development to maintain accuracy and 
consistency.&lt;br&gt;Report PresentationCapability to format reports professionally and present insights clearly to stakeholders, ensuring usability and impact.Preferred Experience &amp;amp; Qualities&lt;br&gt;In addition to technical and analytical skills, we value candidates who bring well-rounded experience and professional attributes that contribute to team success and project delivery. Preferred qualifications include:&lt;br&gt;Effective CommunicationAbility to gather requirements, explain technical concepts to non-technical stakeholders, and document solutions clearly.&lt;br&gt;Project ManagementCapable of working independently, managing multiple projects simultaneously, and consistently meeting deadlines.&lt;br&gt;AdaptabilityQuick to learn new tools and technologies, and responsive to evolving business needs.&lt;br&gt;Data VisualizationExperience in designing interactive dashboards, scorecards, and visualizations using BO tools.&lt;br&gt;Scripting &amp;amp; AutomationFamiliarity with scripting languages (e.g., Python, SDK, JDK) for automation and integration is a plus.&lt;br&gt;SDLC SupportExperience supporting the full software development lifecycle??from requirements gathering to deployment.&lt;br&gt;System AdministrationPrior involvement in BO Administration or system migration projects is advantageous. 
&lt;b&gt;Additional Information&lt;/b&gt; &lt;br&gt;- The candidate should have minimum 5 years of experience in SAP BusinessObjects Business Intelligence.&lt;br&gt;- This position is based at our Hyderabad office.&lt;br&gt;- A 15 years full time education is required.&lt;br&gt; - should be willing to work in B shift 12.30 to 10pm&lt;b&gt; Qualification&lt;/b&gt; &lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819380/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819380/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Fri, 03 Apr 2026 14:46:42 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819339/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt; &lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;2&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will be responsible for designing, building, and configuring applications to meet business process and application requirements. You will play a crucial role in developing solutions to enhance business operations and efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;- Expected to perform independently and become an SME.- Required active participation/contribution in team discussions.- Contribute in providing solutions to work-related problems.- Collaborate with cross-functional teams to design and develop applications.- Implement best practices for application development.- Troubleshoot and debug applications to ensure optimal performance.- Stay updated with the latest technologies and trends in application development.- Provide technical guidance and mentorship to junior team members. 
&lt;br&gt;&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;- &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Microsoft Azure Data Services.- Strong understanding of cloud-based application development.- Experience with data storage and management in Azure environment.- Knowledge of Azure DevOps for continuous integration and deployment.- Hands-on experience in building scalable and secure applications on Azure platform. &lt;b&gt;Additional Information:&lt;/b&gt;- The candidate should have a minimum of 3 years of experience in Microsoft Azure Data Services.- This position is based at our Bengaluru office.- A 15 years full-time education is required.&lt;br&gt; Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819339/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819339/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Fri, 03 Apr 2026 12:01:35 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819308/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819308/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819308/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-04-03T10:47:07+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819287/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration.&lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require you to engage in discussions about data governance and best practices, ensuring that the data architecture is robust, scalable, and efficient. 
You will also be responsible for documenting the data architecture and providing guidance to team members on implementation strategies.&lt;b&gt;Key Responsibilities:&lt;/b&gt;&lt;li&gt;Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal&lt;/li&gt;&lt;li&gt;Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance.&lt;/li&gt;&lt;li&gt;Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows.&lt;/li&gt;&lt;li&gt;Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks.&lt;/li&gt;&lt;li&gt;Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database.&lt;/li&gt;&lt;li&gt;Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices.&lt;/li&gt;&lt;li&gt;Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms.&lt;/li&gt;&lt;li&gt;Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users.&lt;/li&gt;&lt;li&gt;Worked in implementation of AI, Copilot use cases for Data Governance&lt;/li&gt;&lt;li&gt;Thought leadership and innovation&lt;/li&gt;&lt;li&gt;Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;b&gt;Technical Experience:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms.&lt;/li&gt;&lt;li&gt;Proficiency in 
Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts.&lt;/li&gt;&lt;li&gt;Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview.&lt;/li&gt;&lt;li&gt;Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data.&lt;/li&gt;&lt;li&gt;Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric.&lt;/li&gt;&lt;li&gt;Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake &lt;/li&gt;&lt;li&gt;Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool&lt;/li&gt;&lt;li&gt;Experience in Profisee/Unity Catalog is an added advantage&lt;/li&gt;&lt;li&gt;Candidate must have 5-8 years of IT experience and around 2+ years Data &lt;/li&gt;&lt;li&gt;Governance experience &lt;/li&gt;&lt;li&gt;Architect for a medium sized client delivery project&lt;b&gt;Professional Experience:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Should be able to drive the technology design meetings, propose technology design and architecture &lt;/li&gt;&lt;li&gt;Should have excellent client communication skills&lt;/li&gt;&lt;li&gt;Should have good analytical and problem-solving skills &lt;b&gt;Educational Qualification:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Must have:BE/BTech/MCA&lt;/li&gt;&lt;li&gt;Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / 
Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819287/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819287/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-04-03T09:12:47+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819196/tanzu-data-intelligence-rabbitmq-professional-at-vmware/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Please Note:&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;1. If you are a first time user, please create your candidate login account before you apply for a job. (Click Sign In &amp;gt; Create Account)&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;2. If you already have a Candidate Account, please Sign-In before you apply.&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Job Description:&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt;Area: RabbitMQ Core&lt;/p&gt; &lt;p&gt; &lt;u&gt;Functions&lt;/u&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Design and implement features for RabbitMQ and contribute to existing RabbitMQ features like Classic Queues, Quorum Queues, Streams.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Develop robust and scalable producer/consumer apps in Erlang / Java / Go &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Configure and manage RabbitMQ clusters, federation, and shovels.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Optimize message throughput, latency, queue performance, and reliability.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Implement HA using RabbitMQ clustering, queue mirroring, and quorum queues.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Ensure developed code meets security, compliance, and operational standards.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;u&gt;Technical Skills&lt;/u&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Experience with developing and using messaging products.&lt;/li&gt; &lt;li&gt;Hands-on experience writing producer/consumer code in at least one modern language (Erlang, Java, 
Go, Python, &lt;u&gt;Node.js&lt;/u&gt; ).&lt;/li&gt; &lt;li&gt;High understanding of Queue / Stream products like Kafka.&lt;/li&gt; &lt;li&gt;High understanding of the following concepts:&lt;/li&gt; &lt;li&gt;Exchanges (direct, topic, fanout, headers)&lt;/li&gt; &lt;li&gt;Queues (classic, quorum, stream)&lt;/li&gt; &lt;li&gt;Bindings &amp;amp; routing patterns&lt;/li&gt; &lt;li&gt;Acknowledgements, prefetch, delivery semantics&lt;/li&gt; &lt;li&gt;Strong debugging and log analysis skills.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Broadcom is proud to be an equal opportunity employer. We will consider qualified applicants without regard to race, color, creed, religion, sex, sexual orientation, national origin, citizenship, disability status, medical condition, pregnancy, protected veteran status or any other characteristic protected by federal, state, or local law. We will also consider qualified applicants with arrest and conviction records consistent with local law.&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;If you are located outside USA, please be sure to fill out a home address as this will be used for future correspondence.&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;VMware&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kalyani&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819196/tanzu-data-intelligence-rabbitmq-professional-at-vmware/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819196/tanzu-data-intelligence-rabbitmq-professional-at-vmware/</link>
  <title>[Full Time] Tanzu Data Intelligence RabbitMQ Professional at VMware</title>
  <dc:date>2026-04-03T03:02:12+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819166/data-architect-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Requirements&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience At least 10+ years of experience in AWS based projects. Technical skills Proficiency in Python and PySpark for data engineering tasks. Big Data Strong knowledge of Big Data technologies and data warehousing concepts. AWS services &lt;/li&gt;&lt;li&gt;Experience with AWS Data Engineering stack, including S3, RDS, Athena, Glue, Lambda, and Step Functions. SQL Strong SQL skills for data manipulation and querying. CI CD Experience with CI CD tools like Terraform and Git Actions. Soft skills &lt;/li&gt;&lt;li&gt;Good communication skills and ability to work in a multicultural team. Design and implement data pipelines Develop ETL jobs to ingest and move data within the AWS environment using tools like AWS Glue. &lt;/li&gt;&lt;li&gt;Data storage and processing Build and maintain systems for data collection storage processing and analysis using AWS services such as S3 RDS Athena and Redshift&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819166/data-architect-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819166/data-architect-at-virtusa/</link>
  <title>[Full Time] Data Architect at Virtusa</title>
  <dc:date>2026-04-03T01:59:47+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819175/data-architect-scala-at-happiest-minds/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt; &lt;strong&gt;Job Summary&lt;/strong&gt; &lt;/b&gt; &lt;div&gt;Senior Data Architect with 10+ years of experience to design, build, and optimize scalable data pipelines and streaming solutions. The ideal candidate should have strong expertise in Spark (Streaming &amp;amp; Scala), Databricks, Delta Lake, Airflow, Snowflake, and AWS services, with a solid understanding of data engineering best practices and distributed systems. &lt;div&gt; &lt;div&gt; &lt;strong&gt;Key Responsibilities&lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt;Design and develop scalable &lt;strong&gt;batch and real-time data pipelines&lt;/strong&gt; using Spark (Scala) and Spark Streaming &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Build and manage data workflows using &lt;strong&gt;Airflow&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Develop and optimize data solutions on &lt;strong&gt;Databricks&lt;/strong&gt; with &lt;strong&gt;Delta Lake&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Integrate and manage data across &lt;strong&gt;Snowflake&lt;/strong&gt; and AWS ecosystem &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Work with AWS services such as &lt;strong&gt;S3, ECS, and MSK (Kafka)&lt;/strong&gt; for data ingestion and processing &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Ensure data quality, reliability, and performance of pipelines &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Collaborate with cross-functional teams to understand data requirements and deliver solutions &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Implement best practices for &lt;strong&gt;data governance, security, and privacy (CCPA/GDPR)&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Troubleshoot and optimize performance issues in large-scale distributed systems &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Mentor junior engineers and contribute to architectural decisions &lt;/div&gt;&lt;/li&gt; 
&lt;/ul&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt;Required Skills&lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt;10+ years of experience in &lt;strong&gt;Data Engineering / Big Data&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Strong hands-on experience with: &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;strong&gt;Apache Spark (Scala) &amp;amp; Spark Streaming&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;strong&gt;Databricks &amp;amp; Delta Lake&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;strong&gt;Apache Airflow&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;strong&gt;Snowflake&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Solid experience with AWS services: &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;strong&gt;S3, ECS, MSK (Kafka)&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Strong programming skills in &lt;strong&gt;Scala/Java or Python&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Deep understanding of &lt;strong&gt;distributed data processing and ETL/ELT design patterns&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Experience in building &lt;strong&gt;high-performance, scalable data pipelines&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt;Good to Have&lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt;Experience with &lt;strong&gt;Datadog&lt;/strong&gt; for monitoring and observability &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Working knowledge of &lt;strong&gt;Java and/or Python&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Familiarity with &lt;strong&gt;SBT (Scala Build Tool)&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Experience with &lt;strong&gt;GitHub Actions&lt;/strong&gt; for CI/CD pipelines &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Understanding of &lt;strong&gt;data privacy 
regulations (CCPA, GDPR)&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Experience in &lt;strong&gt;real-time streaming architectures&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;strong&gt;Soft Skills&lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt;Strong problem-solving and analytical skills &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Excellent communication and stakeholder management &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Ability to work in a fast-paced, collaborative environment &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt;Preferred Qualifications&lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt;Experience in cloud-native data platforms &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Prior experience in handling &lt;strong&gt;large-scale data platforms&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;li&gt; &lt;div&gt;Exposure to &lt;strong&gt;data governance and compliance frameworks&lt;/strong&gt; &lt;/div&gt;&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Happiest Minds&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819175/data-architect-scala-at-happiest-minds/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819175/data-architect-scala-at-happiest-minds/</link>
  <title>[Full Time] Data Architect - Scala at Happiest Minds</title>
  <dc:date>2026-04-03T01:24:29+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819019/senior-software-engineer-python-gen-ai-at-wells-fargo/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;b&gt; About this role: &lt;/b&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt; Wells Fargo is seeking a Senior Software Engineer . &lt;/div&gt;&lt;div&gt; Wells Fargo is seeking a passionate Python, Spark senior developer with good exposure to Gen AI and full stack engineering. A successful candidate should be self-motivated/self-directed and demonstrate high proficiency in application design, coding, code reviews, refactoring and troubleshooting. The individual will support and assist in the analysis and design of complex technical solutions including maintaining and modifying new and existing application modules, creating necessary design documentation, participating in code review, researching production issues, test support issues etc. &lt;/div&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt; In this role, you will: &lt;/b&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt; Lead moderately complex initiatives and deliverables within technical domain environments &lt;/li&gt;&lt;li&gt; Contribute to large scale planning of strategies &lt;/li&gt;&lt;li&gt; Design, code, test, debug, and document for projects and programs associated with technology domain, including upgrades and deployments &lt;/li&gt;&lt;li&gt; Review moderately complex technical challenges that require an in-depth evaluation of technologies and procedures &lt;/li&gt;&lt;li&gt; Resolve moderately complex issues and lead a team to meet existing client needs or potential new clients needs while leveraging solid understanding of the function, policies, procedures, or compliance requirements &lt;/li&gt;&lt;li&gt; Collaborate and consult with peers, colleagues, and mid-level managers to resolve technical challenges and achieve goals &lt;/li&gt;&lt;li&gt; Lead projects and act as an escalation point, provide guidance and direction to less experienced staff &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt; Required Qualifications: &lt;/b&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt; 4+ 
years of Software Engineering experience, or equivalent demonstrated through one or a combination of the following: work experience, training, military experience, education &lt;b&gt; . &lt;/b&gt;&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt; Desired Qualifications: &lt;/b&gt;&lt;/div&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt; 6+ years strong development experience in Python, PySpark. &lt;/li&gt;&lt;li&gt; Understands the concepts of AI, Gen AI and has hands-on experience &lt;/li&gt;&lt;li&gt; Exposure to writing APIs, API Gateways, Microservices. &lt;/li&gt;&lt;li&gt; Experience and knowledge in continuous integration/deployment (CI/CD) using udeploy/harness, Jenkins, github, JIRA. Dev Ops. &lt;/li&gt;&lt;li&gt; Good understanding of Data modelling, Database design, Data analytics, Data warehousing concepts, Data Quality and Data Governance &lt;/li&gt;&lt;li&gt; Strong debugging, problem solving and investigative skills. Ability to assimilate disparate information (log files, error messages etc.) and pursue leads to find root cause problems. &lt;/li&gt;&lt;li&gt; Proficiency in Linux and shell scripts, job scheduling &lt;/li&gt;&lt;li&gt; Assure adherence to standards, best practices, and alignment with overall architecture &lt;/li&gt;&lt;li&gt; Strong organizational, multi-tasking, and prioritizing skills &lt;/li&gt;&lt;li&gt; Knowledge of automated testing is a plus. 
&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;&lt;b&gt; Job Expectations: &lt;/b&gt;&lt;/b&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt; Banking/ Financial Services Experience &lt;/li&gt;&lt;li&gt; Excellent verbal, written, and interpersonal communication skills &lt;/li&gt;&lt;li&gt; Experience with Agile delivery methodology &lt;/li&gt;&lt;li&gt; Good analytical &amp;amp; debugging skills with high attention to detail and accuracy &lt;/li&gt;&lt;li&gt; Ability to work effectively in virtual environment where key team members and partners are in various time zones and locations &lt;/li&gt;&lt;li&gt; Outstanding problem solving and decision making skills &lt;/li&gt;&lt;li&gt; Database query design and optimization &lt;/li&gt;&lt;li&gt; Ability to drive the requirements process with both technical and business partners &lt;/li&gt;&lt;li&gt; Ability to be flexible and adjust plans quickly to meet changing business needs &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;&lt;span&gt;&lt;/span&gt;&lt;/b&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Wells Fargo&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819019/senior-software-engineer-python-gen-ai-at-wells-fargo/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819019/senior-software-engineer-python-gen-ai-at-wells-fargo/</link>
  <title>[Full Time] Senior Software Engineer, Python &amp; Gen AI at Wells Fargo</title>
  <dc:date>2026-04-03T00:34:52+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819195/data-modeler-architect-vp-at-barclays/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt;Join us as a Data Modeler/Architect - VP at Barclays, where you ll drive innovation and creativity by developing forward-thinking, data-driven solutions. Your problem-solving mindset, determination, and collaborative spirit will be key to challenging the status quo and introducing new ideas. In this role, you ll be at the Centre of our data strategy, transforming raw information into actionable insights that power smarter decisions and operational excellence. You ll collaborate across teams to optimize data pipelines, data structure, enhance governance, and support regulatory compliance making a real impact on how Barclays delivers value.&lt;/span&gt; &lt;p&gt;&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;To be successful as a Data Modeler/Architect - VP, &lt;/span&gt; &lt;span&gt;you should have experience with:&lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Significant experience in data architecture or enterprise data modelling, ideally at a major bank or investment bank.&lt;/li&gt; &lt;li&gt;A great understanding of banking products, risk frameworks, regulatory data requirements and financial markets data.&lt;/li&gt; &lt;li&gt;Expertise in data modelling tools and frameworks with previous experience scaling models across global functions.&lt;/li&gt; &lt;li&gt;Database Systems&lt;ul&gt; &lt;li&gt;Relational: Oracle, SQL Server, PostgreSQL, MySQL&lt;/li&gt; &lt;li&gt;Columnar: Amazon Redshift, Snowflake&lt;/li&gt; &lt;li&gt;NoSQL: MongoDB, Cassandra (for some use cases)&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt;SQL and Scripting&lt;ul&gt; &lt;li&gt;Advanced SQL (DDL, DML, performance tuning)&lt;/li&gt; &lt;li&gt;PL/SQL or T-SQL&lt;/li&gt; &lt;li&gt;Python or Shell scripting for data validation and automation&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt;Data Warehousing &amp;amp; ETL&lt;ul&gt; &lt;li&gt;Data warehousing concepts (Inmon, Kimball)&lt;/li&gt; 
&lt;li&gt;ETL tools: Informatica etc&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt;Cloud Platforms (increasingly common)&lt;ul&gt; &lt;li&gt;AWS (especially S3, Redshift, Glue)&lt;/li&gt; &lt;li&gt;Lake formation / Data Lake implementation&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt;Markets Specific Responsibilities:&lt;ul&gt; &lt;li&gt;Lead Markets end to end data architecture strategy, defining the vision and multi year roadmaps that align business goals, regulatory drivers, and enterprise standards.&lt;/li&gt; &lt;li&gt;Govern Markets focused enterprise and domain data models, ensuring accuracy, re use, interoperability, and regulatory ready traceability across priority data domains.&lt;/li&gt; &lt;li&gt;Provide architectural leadership for Markets data products and platforms, enabling high quality design, semantic consistency, and integration across cloud and on prem ecosystems.&lt;/li&gt; &lt;li&gt;Drive and evolve data architecture governance, setting modelling, metadata, lineage, and interoperability standards, and ensuring compliance through review boards and enterprise forums.&lt;/li&gt; &lt;li&gt;Act as senior adviser to CDOs, CIOs, COOs and technical leaders, guiding complex design decisions and influencing cross functional teams with clarity and strategic insight.&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;span&gt; &lt;/span&gt; &lt;b&gt;Additional relevant skills given below are highly valued:&lt;/b&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Experience with cloud platforms (e.g. AWS, Azure) and big data ecosystems&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Familiarity with data governance frameworks and regulatory standards (e.g. 
BCBS239, GDPR).&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ability to collaborate across functions and influence decision-making at senior levels.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Knowledge of metadata management, data modelling, and documentation best practices.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;A mindset aligned with Barclays values: Respect, Integrity, Service, Excellence, and Stewardship.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;You may be assessed on the key critical skills relevant for success in role, such as risk and controls, change and transformation, business acumen strategic thinking and digital and technology, as well as job-specific technical skills.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;This role is based in our Pune office.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;Purpose of the role&lt;/b&gt; &lt;/p&gt; &lt;p&gt;To implement data quality process and procedures, ensuring that data is reliable and trustworthy, then extract actionable insights from it to help the organisation improve its operation, and optimise resources. 
&lt;/p&gt; &lt;p&gt; &lt;b&gt;Accountabilities&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Investigation and analysis of data issues related to quality, lineage, controls, and authoritative source identification.&lt;/li&gt; &lt;li&gt;Execution of data cleansing and transformation tasks to prepare data for analysis.&lt;/li&gt; &lt;li&gt;Designing and building data pipelines to automate data movement and processing.&lt;/li&gt; &lt;li&gt;Development and application of advanced analytical techniques, including machine learning and AI, to solve complex business problems.&lt;/li&gt; &lt;li&gt;Documentation of data quality findings and recommendations for improvement.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Vice President Expectations&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;To contribute or set strategy, drive requirements and make recommendations for change. Plan resources, budgets, and policies; manage and maintain policies/ processes; deliver continuous improvements and escalate breaches of policies/procedures..&lt;/li&gt; &lt;li&gt;If managing a team, they define jobs and responsibilities, planning for the department s future needs and operations, counselling employees on performance and contributing to employee pay decisions/changes. They may also lead a number of specialists to influence the operations of a department, in alignment with strategic as well as tactical priorities, while balancing short and long term goals and ensuring that budgets and schedules meet corporate requirements..&lt;/li&gt; &lt;li&gt;If the position has leadership responsibilities, People Leaders are expected to demonstrate a clear set of leadership behaviours to create an environment for colleagues to thrive and deliver to a consistently excellent standard. 
The four LEAD behaviours are: L Listen and be authentic, E Energise and inspire, A Align across the enterprise, D Develop others..&lt;/li&gt; &lt;li&gt;OR for an individual contributor, they will be a subject matter expert within own discipline and will guide technical direction. They will lead collaborative, multi-year assignments and guide team members through structured assignments, identify the need for the inclusion of other areas of specialisation to complete assignments. They will train, guide and coach less experienced specialists and provide information affecting long term profits, organisational risks and strategic decisions..&lt;/li&gt; &lt;li&gt;Advise key stakeholders, including functional leadership teams and senior management on functional and cross functional areas of impact and alignment.&lt;/li&gt; &lt;li&gt;Manage and mitigate risks through assessment, in support of the control and governance agenda.&lt;/li&gt; &lt;li&gt;Demonstrate leadership and accountability for managing risk and strengthening controls in relation to the work your team does.&lt;/li&gt; &lt;li&gt;Demonstrate comprehensive understanding of the organisation functions to contribute to achieving the goals of the business.&lt;/li&gt; &lt;li&gt;Collaborate with other areas of work, for business aligned support areas to keep up to speed with business activity and the business strategies.&lt;/li&gt; &lt;li&gt;Create solutions based on sophisticated analytical thought comparing and selecting complex alternatives. 
In-depth analysis with interpretative thinking will be required to define problems and develop innovative solutions.&lt;/li&gt; &lt;li&gt;Adopt and include the outcomes of extensive research in problem solving processes.&lt;/li&gt; &lt;li&gt;Seek out, build and maintain trusting relationships and partnerships with internal and external stakeholders in order to accomplish key business objectives, using influencing and negotiating skills to achieve outcomes.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;All colleagues will be expected to demonstrate the Barclays Values of Respect, Integrity, Service, Excellence and Stewardship our moral compass, helping us do what we believe is right. They will also be expected to demonstrate the Barclays Mindset to Empower, Challenge and Drive the operating manual for how we behave.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Head - Data Base&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Barclays&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819195/data-modeler-architect-vp-at-barclays/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819195/data-modeler-architect-vp-at-barclays/</link>
  <title>[Full Time] Data Modeler/Architect - VP at Barclays</title>
  <dc:date>Thu, 02 Apr 2026 20:44:28 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818796/database-pl-sql-lead-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Core Technical Skill sets: &lt;/strong&gt; &lt;/span&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Oracle PL/SQL developer with version 12+ &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience working on Unix/Linux &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Willingness to learn new technologies &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &amp;nbsp; &lt;span&gt; &lt;strong&gt; &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Requirements: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; 8 to 10 years of exp with hand on experience on Oracle PL/SQL &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Willingness to learn and understand the business domain &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Hands-on experience with Power BI for data visualization and reporting. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to meet client needs without sacrificing deadlines and quality &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to work effectively within global team &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Excellent communication and teamwork skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Great attention to detail &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Analytical mind &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Degree in Computer Science, Statistics or relevant field &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt; Mandatory Competencies &lt;/b&gt; &lt;/b&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; Database - Database Programming - PL/SQL &lt;/li&gt; &lt;li&gt; Database - Oracle - PL/SQL Packages &lt;/li&gt; &lt;li&gt; Operating System - Operating System - Unix &lt;/li&gt; &lt;li&gt; Operating System - Operating System - Linux &lt;/li&gt; &lt;li&gt; 
Beh - Communication &lt;/li&gt; &lt;li&gt; BI and Reporting Tools - BI and Reporting Tools - Power BI &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/span&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Manager&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818796/database-pl-sql-lead-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818796/database-pl-sql-lead-at-iris-software/</link>
  <title>[Full Time] Database PL/SQL - Lead at Iris Software</title>
  <dc:date>Thu, 02 Apr 2026 12:43:39 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819541/senior-software-engineer-at-ltimindtree/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Job Title: Senior Software Engineer (AS/400 Developer) Location: PAN India Job Summary: We are seeking a Senior AS400 Developer with 3 to 7 years of experience in RPG and CL programming. The successful candidate will be responsible for developing and maintaining AS400-based applications. This role requires an individual who is knowledgeable about the latest AS400 OS versions, programming language enhancements, and industry best practices. The Senior Software Engineer will primarily develop and maintain programming code for new and existing software on the AS400 platform. The role requires the utilization of RPGLE, including free form and embedded SQL, and CL400 for application development. The candidate will be expected to follow established programming standards and guidelines to ensure high code quality. The candidate will also need to understand the system architecture to support software updates and customer support during the initial learning phase. The role requires considerable discretion in handling sensitive or confidential data. The Senior Software Engineer will be expected to collaborate with team members to ensure high-quality deliverables and efficient development processes. 
Roles and Responsibilities: - Design, code, test, and debug RPG and CL programs on the AS400 system.&lt;br&gt;- Participate in all phases of the Software Development Life Cycle (SDLC).&lt;br&gt;- Provide technical support and troubleshooting for AS400 applications.&lt;br&gt;- Work closely with business analysts and other developers to translate requirements into functional applications.&lt;br&gt;- Ensure the timely completion of assigned tasks to meet project deadlines.&lt;br&gt;- Maintain positive contributions as a member of the programming team.&lt;br&gt;- Assist in maintaining knowledge of other computer systems and data storage types relevant to AS400 environments.&lt;br&gt;- Support integration efforts with other systems as needed.&lt;/p&gt; &lt;p&gt;Mandatory Skills:&lt;/p&gt; &lt;p&gt;- Proficiency in AS/400, AS400 - RPG, CL/400.&lt;br&gt;- Strong understanding of AS400 OS versions and programming language enhancements.&lt;br&gt;- Knowledge of industry best practices regarding AS400 development.&lt;/p&gt; &lt;p&gt;Qualifications:&lt;/p&gt; &lt;p&gt;- Must have 3 to 7 years of experience in RPG and CL programming.&lt;br&gt;- Proven experience in developing and maintaining AS400-based applications.&lt;br&gt;- Strong problem-solving skills.&lt;br&gt;- Excellent communication skills.&lt;br&gt;- Ability to work in a team environment.&lt;br&gt;- Ability to handle sensitive or confidential data with discretion.&lt;br&gt;- Proven ability to meet project deadlines.&lt;/p&gt; &lt;p&gt;We are committed to diversity and inclusivity in our team, and we encourage qualified individuals of all backgrounds to apply. 
&lt;/p&gt; &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Ltimindtree&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819541/senior-software-engineer-at-ltimindtree/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819541/senior-software-engineer-at-ltimindtree/</link>
  <title>[Full Time] Senior Software Engineer at Ltimindtree</title>
  <dc:date>Thu, 02 Apr 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819525/senior-software-engineer-i-sql-developer-at-optum/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Primary Responsibilities: &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Develop solid scalable and reusable Data pipelines to Extract, load and transform data into cloud based Datawarehouse Snowflake &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop complex SQL queries (using joins, analytic functions, aggregation functions, windows functions), ETL Jobs, Reports/Dashboards to meet complex business requirements and data needs &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; GitHub Copilot usage in software development Promote AI-first mindset in product development &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Collaborate with cross-functional teams to improve code quality, development practices, and processes &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Maintain development standards, perform critical development, and release operations Maintain Zero security violation (x-ray scan etc), Zero code vulnerabilities &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Troubleshoot and debug complex technical issues &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Conduct code reviews and provide constructive feedback &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Comply with the terms and conditions of the employment contract, company policies and procedures, and any and all directives (such as, but not limited to, transfer and/or re-assignment to different work locations, change in teams and/or work shifts, policies in regards to flexibility of work benefits and/or work environment, alternative work arrangements, and other decisions that may arise due to the changing business environment). 
The Company may adopt, vary or rescind these policies and directives in its absolute discretion and without any limitation (implied or otherwise) on its ability to do so &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Required Qualifications: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; AI-Powered Solutions Development - Experience in developing and deploying AI-powered solutions using no-code, low-code, and advanced platforms, translating business needs into scalable solutions &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Good understanding of Database concepts &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Expert/ln-depth knowledge Of SQL: Building, debugging and tuning complex queries, business rules engines &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Knowledge in Dimensional Data Modelling and Semantic layer building for DWH, Reporting and Analytics &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt; Technical Skills &lt;/strong&gt; &lt;/p&gt; &lt;p&gt; &lt;strong&gt; &lt;span&gt; Solid knowledge of: &lt;/span&gt; &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; SQL, &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Datawarehouse &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; ETL &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Database concepts &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; SQL performance tuning &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Reporting and Analytics &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Cloud platform &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Agile methodologies &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt; Familiar with: &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;strong&gt; &lt;/strong&gt; &lt;li&gt; &lt;strong&gt; &lt;/strong&gt; &lt;span&gt; Cloud based Datawarehouse (Preferred Snowflake) &lt;/span&gt; 
&lt;/li&gt; &lt;li&gt; &lt;span&gt; Cloud based Reporting/Analytics platform (Preferred Looker) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; ETL and Integration tools (Preferred Snaplogic) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ansible and Jenkins for CICD &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Cloud platform (Preferred Azure) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; AI technologies &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Demonstrated ability to create big complex SQL queries (using joins, analytic functions, aggregration functions, windows functions) to load, transform and present data to downstream consumers to meet business data needs &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Demonstrated ability to prepare data as part Of an ETL or ELT process, and perform transform-load in cloud based Datawarehouse &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Proven efficient at resource optimization, analytics, and SQL performance tuning &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Proven solid programming and algorithmic skills &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;p&gt; &lt;span&gt; &lt;i&gt; &lt;/i&gt; &lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Retail&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Optum&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819525/senior-software-engineer-i-sql-developer-at-optum/&quot;&gt;Apply&lt;/a&gt;&lt;br 
/&gt;</description>
  <link>https://ineojobs.com/job/819525/senior-software-engineer-i-sql-developer-at-optum/</link>
  <title>[Full Time] Senior Software Engineer I - SQL Developer at Optum</title>
  <dc:date>Thu, 02 Apr 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818460/hiring-grafana-developer-at-2coms/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;SUMMARY&lt;br&gt;&lt;span&gt; &lt;div&gt; Summary:&lt;br&gt; &lt;/div&gt; &lt;div&gt; We are seeking skilled Grafana developers to design and implement high-impact monitoring dashboards that drive operational visibility and system reliability. The ideal candidate will leverage Grafana, Kibana, and Logstash Grok to transform complex telemetry data into actionable insights. With expertise in PromQL and LogQL, you will enable advanced data correlation and real-time observability. You will play a key role in establishing robust alerting strategies and applying industry-standard observability frameworks such as RED (Rate, Errors, Duration) and USE (Utilization, Saturation, Errors) to optimize system performance and incident response. This role is critical to maintaining the health, scalability, and resilience of our infrastructure.&lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; Responsibilities:&lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt;Design and develop detailed, interactive dashboards in Grafana using advanced visualization techniques.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Integrate and process log and metric data from sources including Prometheus, Loki, Kibana, and Logstash Grok.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Implement and manage complex alerting rules using Grafana Alerting, including custom metrics and multi-dimensional conditions.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Apply observability best practices such as RED and USE methodologies to monitor and improve system performance.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Optimize query performance using PromQL for Prometheus and LogQL for Loki, including advanced data correlation and filtering.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Collaborate with engineering and operations teams to align monitoring solutions with business and technical objectives.&lt;br&gt;&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; 
&lt;/div&gt; &lt;/span&gt; &lt;br&gt; &lt;span&gt; Requirements&lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;We are looking for Grafana developers who can design detailed dashboards using Grafana editor, Kibana, Logstash Grok.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;br&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;span&gt; &amp;nbsp;&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt;&lt;b&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;PromQL &amp;amp; LogQL:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;Mastery of PromQL for Prometheus and LogQL for Loki, including advanced data correlation techniques.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;br&gt; &lt;/span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;span&gt; &amp;nbsp;&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt;&lt;b&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Grafana Alerting:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;Setting up complex alerting rules (e.g., alert on custom metrics, multi-dimensional alerting).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;br&gt; &lt;/span&gt; &lt;/span&gt; 
&lt;/span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&lt;span&gt; &amp;nbsp;&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt;&lt;b&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; Observability Methods:&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/b&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt; &amp;nbsp;Applying standard practices like&lt;span&gt; &amp;nbsp;&lt;/span&gt; &lt;b&gt; RED&lt;/b&gt;&amp;nbsp;(Rate, Errors, Duration) and&lt;span&gt; &amp;nbsp;&lt;/span&gt; &lt;b&gt; USE&lt;/b&gt; &amp;nbsp;(Utilization, Saturation, Errors) methods to dashboards.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;br&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt;&lt;/p&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt;Proven experience with Grafana dashboard development and configuration.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Expertise in PromQL for Prometheus and LogQL for Loki, including advanced querying and correlation techniques.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Hands - on experience with Kibana and Logstash Grok for log parsing and visualization.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Strong understanding of observability principles, including RED and USE methods.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Experience setting up and managing complex alerting rules in Grafana.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Familiarity with monitoring infrastructure and distributed systems.&lt;br&gt;&lt;/li&gt; &lt;li&gt;Ability to translate technical data into clear, actionable insights for cross-functional teams.&lt;br&gt;&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; Experience: 3-7 years&lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; Location: Mumbai (Thane)&lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; 
&lt;br&gt; &lt;/div&gt; &lt;/span&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;2coms&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818460/hiring-grafana-developer-at-2coms/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818460/hiring-grafana-developer-at-2coms/</link>
  <title>[Full Time] Hiring Grafana Developer at 2coms</title>
  <dc:date>Thu, 02 Apr 2026 03:13:53 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818764/databricks-python-senior-engineer-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &amp;nbsp; &lt;span&gt; &lt;strong&gt; Core skills required for the role : &lt;/strong&gt; &lt;div&gt; &lt;/div&gt; &lt;ol&gt; &lt;li&gt; &lt;span&gt; Databricks Level: Advanced &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; SQL (MSSQL Server) Joins, SQ optimization, basic knowledge of StoredProcedure, Functions &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; PySpark Level: Advanced &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Azure Delta lake &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Python Basic &lt;/span&gt; &lt;/li&gt; &lt;/ol&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt; Mandatory Competencies &lt;/b&gt; &lt;/b&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; Big Data - Big Data - Pyspark &lt;/li&gt; &lt;li&gt; Data Science and Machine Learning - Data Science and Machine Learning - Databricks &lt;/li&gt; &lt;li&gt; Cloud - Azure - Azure Data Factory (ADF), Azure Databricks, Azure Data Lake Storage, Event Hubs, HDInsight &lt;/li&gt; &lt;li&gt; Database - Sql Server - DBA &lt;/li&gt; &lt;li&gt; Data Science and Machine Learning - Data Science and Machine Learning - Python &lt;/li&gt; &lt;li&gt; Beh - Communication &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/span&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full 
time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818764/databricks-python-senior-engineer-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818764/databricks-python-senior-engineer-at-iris-software/</link>
  <title>[Full Time] Databricks/Python - Senior Engineer at Iris Software</title>
  <dc:date>Thu, 02 Apr 2026 00:01:35 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817961/murex-datamart-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt; We are looking for an experienced Murex Datamart professional with strong technical, functional, and analytical skills. The candidate should have in-depth knowledge of the Murex Datamart architecture, hands-on experience with Unix and SQL, and strong understanding of financial product classes. The role involves Datamart development, reconciliation, reporting, performance optimization, and collaboration with front-office, risk, finance, and IT teams. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt; Strong end-to-end knowledge of Murex Datamart architecture and components. &lt;/li&gt;&lt;li&gt;Design, develop, enhance, and support Murex Datamart reports &lt;/li&gt;&lt;li&gt;Work on Datamart configuration. &lt;/li&gt;&lt;li&gt;Handle Datamart migrations, upgrades, and version changes. &lt;/li&gt;&lt;li&gt;Analyze and resolve Datamart data issues and performance bottlenecks. &lt;/li&gt;&lt;li&gt;Work closely with cross-functional teams to deliver system enhancements and new product onboarding. &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt; Must have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt; 2 to 4 Years in Murex Datamart, preferably within a banking environment. &lt;/li&gt;&lt;li&gt;Strong understanding of Murex trade lifecycle. &lt;/li&gt;&lt;li&gt;Functional exposure across all major product classes. &lt;/li&gt;&lt;li&gt;Proficiency in Unix/Linux fundamentals for system operations and scripting. &lt;/li&gt;&lt;li&gt;Should have a strong understanding of SQL for data extraction, analysis, and troubleshooting. &lt;/li&gt;&lt;li&gt;Experience with Murex upgrades and migrations. &lt;/li&gt;&lt;li&gt;Exposure to EOD / Batch processing. &lt;/li&gt;&lt;li&gt;Experience supporting regulatory or finance-driven reporting. 
&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Nice to have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Demonstrate ownership and responsibility in all assignments. &lt;/li&gt;&lt;li&gt;Bachelor&apos;s degree in Computer Engineering/ School of Computing/ Finance IT/ other related IT degrees would be preferred. &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817961/murex-datamart-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817961/murex-datamart-developer-at-luxoft/</link>
  <title>[Full Time] Murex Datamart Developer at Luxoft</title>
  <dc:date>Wed, 01 Apr 2026 13:48:23 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819203/support-engineer-iii-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt; This role requires the flexibility to work 5 days a week (occasionally on weekends) on a rotational basis. AWS Support is 24x7x365 operations and work timings for this role is in India night time i.e. 10 PM to 6 AM IST or 1 PM to 10 PM IST. You are expected to work in night shifts hours based on business requirements. About the team &lt;br&gt; Diverse Experiences &lt;br&gt; Amazon values diverse experiences. Even if you do not meet all of the preferred qualifications and skills listed in the job description, we encourage candidates to apply. If your career is just starting, hasn t followed a traditional path, or includes alternative experiences, don t let it stop you from applying. Why AWS &lt;br&gt; Amazon Web Services (AWS) is the world s most comprehensive and broadly adopted cloud platform. We pioneered cloud computing and never stopped innovating that s why customers from the most successful startups to Global 500 companies trust our robust suite of products and services to power their businesses. &lt;br&gt; Work/Life Balance &lt;br&gt; We value work-life harmony. Achieving success at work should never come at the expense of sacrifices at home, which is why we strive for flexibility as part of our working culture. When we feel supported in the workplace and at home, there s nothing we can t achieve. &lt;br&gt; Inclusive Team Culture &lt;br&gt; AWS values curiosity and connection. Our employee-led and company-sponsored affinity groups promote inclusion and empower our people to take pride in what makes us unique. Our inclusion events foster stronger, more collaborative teams. Our continual innovation is fueled by the bold ideas, fresh perspectives, and passionate voices our teams bring to everything we do. &lt;br&gt; Mentorship and Career Growth &lt;br&gt; We re continuously raising our performance bar as we strive to become Earth s Best Employer. 
That&apos;s why you&apos;ll find endless knowledge-sharing, mentorship and other career-advancing resources here to help you develop into a better-rounded professional. - 2+ years of software development, or 2+ years of technical support experience &lt;br&gt; - Experience scripting in modern program languages &lt;br&gt; - Experience troubleshooting and debugging technical systems - Knowledge of web services, distributed systems, and web application development &lt;br&gt; - Experience troubleshooting &amp;amp; maintaining hardware &amp;amp; software RAID &lt;br&gt; - Experience with REST web services, XML, JSON &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819203/support-engineer-iii-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819203/support-engineer-iii-at-amazon/</link>
  <title>[Full Time] Support Engineer III at Amazon</title>
  <dc:date>2026-04-01T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819807/data-warehouse-testing-lead-at-infosys/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Design, plan, and execute test strategies and test plans for data warehouse and ETL components based on business and technical requirements &lt;/li&gt; &lt;li&gt; Validate data mapping, data transformations, aggregations, and data movement from source to staging, DWH, and downstream systems &lt;/li&gt; &lt;li&gt; Develop and maintain comprehensive test cases, test scenarios, and test data sets for DWT and ETL testing &lt;/li&gt; &lt;li&gt; Perform functional, regression, system integration, and user acceptance support testing for data warehouse solutions &lt;/li&gt; &lt;li&gt; Analyze complex SQL queries and datasets to identify data discrepancies, defects, and root causes &lt;/li&gt; &lt;li&gt; Collaborate with ETL developers, data modelers, and business analysts to clarify requirements and resolve defects efficiently &lt;/li&gt; &lt;li&gt; Establish and enforce quality standards, best practices, and processes for data testing within the team &lt;/li&gt; &lt;li&gt; Provide technical leadership and mentoring to junior testers, guiding them on DWT and ETL testing techniques and tools &lt;/li&gt; &lt;li&gt; Prepare clear test reports, defect metrics, and quality dashboards for stakeholders and project leadership &lt;/li&gt; &lt;li&gt; Participate in design and code review discussions from a quality perspective, highlighting potential data quality risks &lt;/li&gt; &lt;li&gt; Coordinate with cross-functional teams to ensure timely test execution and alignment with project timelines and release plans &lt;/li&gt; &lt;li&gt; Position will be based on business requirements across locations &lt;/li&gt; &lt;li&gt; Primary skills:Cloud testing-&amp;gt;AWS Testing,Data Services-&amp;gt;DWT (Data Warehouse Testing)/ (ETL),Data Services-&amp;gt;TDM (Test Data Management),Data Services-&amp;gt;TDM (Test Data Management)-&amp;gt;Delphix,Data Services-&amp;gt;TDM (Test Data Management)-&amp;gt;IBM Optim , 
Database-&amp;gt;PL / SQL , Package testing-&amp;gt;MDM,Python Desirables:Bigdata-&amp;gt;Python As a Quality Engineering Lead in Data Warehouse Testing, you will be at the heart of ensuring that business-critical data is accurate, reliable, and ready for insight-driven decisions &lt;/li&gt; &lt;li&gt; You will lead testing efforts across complex DWH and ETL ecosystems, collaborating closely with data engineers, BI developers, and business stakeholders to validate data flows end-to-end &lt;/li&gt; &lt;li&gt; This role offers the opportunity to shape test strategies, introduce best practices, and mentor a team of testers while working on large-scale, enterprise-grade data platforms &lt;/li&gt; &lt;li&gt; If you enjoy solving data quality challenges, designing robust test frameworks, and driving continuous improvement in a collaborative environment, this position will allow you to make a visible impact on how data powers the organization s success &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infosys&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819807/data-warehouse-testing-lead-at-infosys/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819807/data-warehouse-testing-lead-at-infosys/</link>
  <title>[Full Time] Data Warehouse Testing Lead at Infosys</title>
  <dc:date>2026-04-01T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819542/senior-integration-developer-oracle-integration-cloud-oracle-epm-at-fujitsu/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Senior Integration Developer (Oracle Integration Cloud &amp;amp; Oracle EPM) &lt;br&gt; Exp : 7+ years &lt;br&gt; Shift : UK Shift ( 2:30-11:30 pm) &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Location: Pune, Noida, Bangalore, Chennai, Hyderabad &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; The Opportunity: &lt;/strong&gt; &lt;/span&gt; &lt;br&gt; &lt;span&gt; We are seeking a highly skilled and experienced Senior Integration Developer with a strong background in Oracle Integration Cloud (OIC) and deep expertise in Oracle Enterprise Performance Management (EPM) modules. This critical role involves designing, developing, and implementing robust integration solutions that connect our Oracle EPM systems (e.g., Planning Budgeting Cloud Service (PBCS), Financial Consolidation and Close Cloud Service (FCCS), Account Reconciliation Cloud Service (ARCS), Enterprise Data Management Cloud Service (EDMCS)) with other enterprise applications. You will ensure the efficient and accurate flow of financial, operational, and master data, playing a pivotal role in transforming our integration landscape and supporting our EPM initiatives. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Key Responsibilities &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Own integration solution architecture and integration strategy for assigned programs/projects. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Translate business/functional requirements into scalable integration designs: interface contracts, payload models, mappings, and non-functional requirements. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Design and build OIC integrations (orchestrations, routing, transformations, enrichment, validations) using reusable components and standards. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Implement secure connectivity and authentication: OAuth 2.0/2.1, Basic, certificates, keys, wallets; ensure compliance with enterprise security controls. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop file-based integrations using SFTP/FTP, CSV/XML, and Oracle Fusion load patterns such as FBDI/BIP/UCM (as applicable). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Define and implement robust operational patterns: error handling framework, exception routing, retries, replay/reprocessing, and idempotency. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Drive testing strategy: unit tests, SIT coordination, test data strategy, defect triage and fixes, and UAT support. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Own deployment and release management: versioning, packaging, environment promotion (DEV/SIT/UAT/PROD), and rollback readiness. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Set up monitoring and operational readiness: integration tracking, alerts, dashboards, runbooks, and support handover/KT. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Perform code reviews and enforce engineering standards for performance, resiliency, maintainability, and documentation. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Mentor junior developers and lead technical discussions with stakeholders (business, functional, security, infra, and external vendors). &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Must-Have Technical Skills &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Oracle Integration Cloud (OIC): connections, integrations, schedules, lookups, libraries, packages, monitoring and troubleshooting. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong API engineering: REST/SOAP, JSON/XML, WSDL/OpenAPI familiarity, pagination, throttling, error semantics. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Data transformation skills: mapping best practices, XSLT where needed, canonical model approach, and schema validation. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; File-based integration: SFTP polling patterns, large file handling, chunking/batching, and Fusion load patterns (FBDI/BIP/UCM where applicable). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Security fundamentals: OAuth, JWT concepts, client credentials, mutual TLS/certificates, secrets handling, and least-privilege access. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Operational excellence: logging, correlation IDs, retry strategy, dead-letter/error queues (conceptually), support runbooks. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Good working knowledge of SQL for data verification, reconciliation, and troubleshooting. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; What You Will Do: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Design &amp;amp; Development: &lt;/strong&gt; Lead the design, development, and implementation of complex integration solutions using Oracle Integration Cloud (OIC) for various enterprise applications, with a primary focus on Oracle EPM modules (PBCS, FCCS, ARCS, EDMCS, etc.). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Technical Expertise: &lt;/strong&gt; Leverage your in-depth knowledge of OIC capabilities, including integrations, process automation (orchestrations), and visual builder to create efficient and scalable solutions. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Oracle EPM Integration: &lt;/strong&gt; Work closely with Finance, Accounting, and Planning teams, as well as functional consultants, to understand business requirements, identify integration points, and translate them into technical specifications for Oracle EPM module integrations. This includes managing data loads, metadata synchronization, and inter-system data transfers critical for financial planning, budgeting, forecasting, consolidation, and reporting cycles. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; API Development &amp;amp; Management: &lt;/strong&gt; Develop, configure, and manage APIs within OIC for various integration patterns, interacting with Oracle EPM APIs and other source/target system APIs. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Troubleshooting &amp;amp; Support: &lt;/strong&gt; Provide expert-level support, troubleshoot complex integration issues related to EPM data flows, and optimize existing integrations for performance and reliability. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Best Practices: &lt;/strong&gt; Advocate for and implement integration best practices, coding standards, and security guidelines within the OIC platform, particularly concerning sensitive financial data. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Documentation: &lt;/strong&gt; Create comprehensive technical design documents, mapping specifications, and support documentation for all integration solutions. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Collaboration: &lt;/strong&gt; Collaborate with cross-functional teams, including architects, functional consultants, business users, and other developers, to deliver integrated solutions. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Mentorship: &lt;/strong&gt; Mentor junior developers and contribute to the continuous improvement of our integration development processes. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Stay Current: &lt;/strong&gt; Keep abreast of the latest OIC features, Oracle EPM Cloud updates, and industry integration trends in financial systems. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; What You Bring: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Experience: &lt;/strong&gt; 7+ years of experience in enterprise application integration development, with at least 3-5 years specifically focused on Oracle Integration Cloud (OIC). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Oracle EPM Domain Expertise: &lt;/strong&gt; Proven experience integrating with and understanding the data models, business processes, and functionalities of multiple Oracle EPM Cloud modules (e.g., PBCS, FCCS, ARCS, EDMCS, Hyperion Planning, HFM). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; OIC Proficiency: &lt;/strong&gt; Hands-on experience with OIC components such as Integrations (App Driven, Scheduled, File-based), Process Automation, Visual Builder, and Adapters (e.g., Oracle ERP Cloud Adapter, REST Adapter, SOAP Adapter, FTP Adapter, Database Adapter, EPM Cloud Adapter if available/used). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Technical Skills: &lt;/strong&gt; &lt;/span&gt; &lt;/li&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Strong proficiency in XML, XSLT, JSON, and REST/SOAP web services. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with SQL and PL/SQL for data manipulation and validation, especially with Oracle databases. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Familiarity with security concepts in integrations (e.g., OAuth, Basic Authentication). 
&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Cloud Experience: &lt;/strong&gt; Understanding of cloud-native concepts and experience working with cloud platforms (preferably Oracle Cloud Infrastructure). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Problem-Solving: &lt;/strong&gt; Excellent analytical and problem-solving skills with the ability to diagnose and resolve complex integration issues. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Communication: &lt;/strong&gt; Strong verbal and written communication skills to effectively interact with technical and non-technical stakeholders. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Team Player: &lt;/strong&gt; Ability to work independently and collaboratively in a fast-paced environment. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Education: &lt;/strong&gt; Bachelor&apos;&apos;s degree in Computer Science, Information Technology, Finance, or a related field. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Bonus Points (Nice-to-Have): &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Oracle Cloud Certification (e.g., Oracle Integration Cloud Specialist, EPM Specialist certifications). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with other Oracle SaaS applications (e.g., ERP Cloud, HCM Cloud, SCM Cloud). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Knowledge of Data Management tools within EPM (e.g., Data Management/FDMEE) and integrating with them via OIC. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with continuous integration/continuous deployment (CI/CD) pipelines for OIC. 
&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Fujitsu&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819542/senior-integration-developer-oracle-integration-cloud-oracle-epm-at-fujitsu/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819542/senior-integration-developer-oracle-integration-cloud-oracle-epm-at-fujitsu/</link>
  <title>[Full Time] Senior Integration Developer (Oracle Integration Cloud &amp; Oracle EPM) at Fujitsu</title>
  <dc:date>2026-04-01T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819181/support-engineer-iii-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt; As a Support Engineer in Infrastructure Automation Tech Operations team, you seek resolution to problems and mitigate risk, always ensuring a Customer Obsessed experience has occurred. You will be working on services with a direct impact on the customer experience. If you are excited about the opportunity to learn and work on distributed systems, enjoy trouble shooting and solving complex problems, consider the opportunities to work with Amazon Physical Stores. You will help solve a variety of challenges and offer your expertise in growing the knowledge of your peers via team collaboration. You will be counted on to identify areas of improvement and drive projects to implement them. We consistently whiteboard so be comfortable writing and supporting your ideas on the team board. You will play an active role in defining the support processes for technologies in partnership with other technology leaders within and possibly outside the team. You should be comfortable with a level of ambiguity that s higher than most projects and relish the idea of solving big challenges. You will also mentor other engineers in your area of expertise. Along the way, we guarantee that you ll work hard, have fun and impact many customers! This role requires the flexibility to work 5 days a week (occasionally on weekends) on a rotational basis. AWS Support is 24x7x365 operations and work timings for this role is in India night time i.e. 10 PM to 6 AM IST or 1 PM to 10 PM IST. You are expected to work in night shifts hours based on business requirements. About the team &lt;br&gt; Diverse Experiences &lt;br&gt; Amazon values diverse experiences. Even if you do not meet all of the preferred qualifications and skills listed in the job description, we encourage candidates to apply. If your career is just starting, hasn t followed a traditional path, or includes alternative experiences, don t let it stop you from applying. 
Why AWS &lt;br&gt; Amazon Web Services (AWS) is the world s most comprehensive and broadly adopted cloud platform. We pioneered cloud computing and never stopped innovating that s why customers from the most successful startups to Global 500 companies trust our robust suite of products and services to power their businesses. &lt;br&gt; Work/Life Balance &lt;br&gt; We value work-life harmony. Achieving success at work should never come at the expense of sacrifices at home, which is why we strive for flexibility as part of our working culture. When we feel supported in the workplace and at home, there s nothing we can t achieve. &lt;br&gt; Inclusive Team Culture &lt;br&gt; AWS values curiosity and connection. Our employee-led and company-sponsored affinity groups promote inclusion and empower our people to take pride in what makes us unique. Our inclusion events foster stronger, more collaborative teams. Our continual innovation is fueled by the bold ideas, fresh perspectives, and passionate voices our teams bring to everything we do. &lt;br&gt; Mentorship and Career Growth &lt;br&gt; We re continuously raising our performance bar as we strive to become Earth s Best Employer. That s why you ll find endless knowledge-sharing, mentorship and other career-advancing resources here to help you develop into a better-rounded professional. 
- 2+ years of software development, or 2+ years of technical support experience &lt;br&gt; - Experience scripting in modern program languages &lt;br&gt; - Experience troubleshooting and debugging technical systems - Knowledge of web services, distributed systems, and web application development &lt;br&gt; - Experience troubleshooting &amp;amp; maintaining hardware &amp;amp; software RAID &lt;br&gt; - Experience with REST web services, XML, JSON &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819181/support-engineer-iii-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819181/support-engineer-iii-at-amazon/</link>
  <title>[Full Time] Support Engineer III at Amazon</title>
  <dc:date>2026-04-01T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818013/duckcreek-policy-developer-at-coforge/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Looking for a strong DCT Policy developer with 3-10 years of experience who has hands-on experience working in Example Author, Server, Express, Forms, Rating, Batch Processing, Task Creation, Transact, Address Validation.&lt;/p&gt; &lt;p&gt;&lt;strong&gt;Requirements:&lt;/strong&gt;&lt;/p&gt; &lt;p&gt; Candidate should have strong experience on Duckcreek.&lt;/p&gt; &lt;p&gt; Candidate should have strong experience on Policy /PAS / Policy Centre.&lt;/p&gt; &lt;p&gt; Candidate should strong experience on Duckcreek Example Platform.&lt;/p&gt; &lt;p&gt; Good understanding of underwriting, rating, insurance rules, forms.&lt;/p&gt; &lt;p&gt; Good Knowledge of Policy life cycle and various Policy Transactions&lt;/p&gt; &lt;p&gt; Hands-on experience working in Example Author, Server, Express, Forms, Rating, Batch Processing, Task Creation, Transact, Address Validation.&lt;/p&gt; &lt;p&gt; Good Knowledge of Duck Creek Policy System and workflow.&lt;/p&gt; &lt;p&gt; Experience in P&amp;amp;C insurance domain.&lt;/p&gt; &lt;p&gt; Good Understanding of business, functional requirements&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Coforge&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818013/duckcreek-policy-developer-at-coforge/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818013/duckcreek-policy-developer-at-coforge/</link>
  <title>[Full Time] Duckcreek Policy Developer at Coforge</title>
  <dc:date>2026-04-01T08:32:48+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817792/staff-database-reliability-engineer-in-oracle-cloud-at-rackspace-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;What Were Looking For&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;Someone who can work from Hyderabad - Work from Office &lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;8-10+ years in DBA / Platform Engineering&lt;/li&gt; &lt;li&gt;Strong multi-cloud experience (Azure / AWS / GCP  at least two)&lt;/li&gt; &lt;li&gt;Deep HA/DR &amp;amp; performance tuning expertise&lt;/li&gt; &lt;li&gt;Automation-first mindset (Terraform, scripting, CI/CD)&lt;/li&gt; &lt;li&gt;Experience in SaaS/DBaaS environments preferred&lt;/li&gt; &lt;li&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;For a &lt;b&gt;Site Reliability Engineer (SRE)&lt;/b&gt; in a &lt;b&gt;DBaaS (Database-as-a-Service) support&lt;/b&gt; role, the following &lt;b&gt;mandatory skills&lt;/b&gt; are typically required:&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;1. Database Administration (DBA) Skills&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;b&gt;Primary Database: &lt;/b&gt;Oracle&lt;/li&gt; &lt;li&gt;&lt;b&gt;Secondary Database: &lt;/b&gt;MySQL, PostgreSQL, MS SQL Server&lt;/li&gt; &lt;li&gt;&lt;b&gt;Database Backup &amp;amp; Recovery:&lt;/b&gt; Tools and strategies for database backups and disaster recovery.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Performance Tuning:&lt;/b&gt; Query optimization, indexing strategies, and database performance troubleshooting.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Database Security:&lt;/b&gt; User management, roles, access control, and auditing.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;2. 
Cloud Infrastructure Knowledge (DBaaS)&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;b&gt;Cloud Platforms:&lt;/b&gt; AWS (RDS, Aurora), Azure (Cosmos DB, SQL Database), GCP (Cloud SQL, Firestore).&lt;/li&gt; &lt;li&gt;&lt;b&gt;Infrastructure as Code (IaC):&lt;/b&gt; Terraform, CloudFormation, Kubernetes.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Kubernetes &amp;amp; Containers:&lt;/b&gt; Running databases in containers (like Kubernetes).&lt;/li&gt; &lt;li&gt;&lt;b&gt;Observability Tools:&lt;/b&gt; ELK stack (Elasticsearch, Logstash, Kibana)&lt;/li&gt; &lt;li&gt;&lt;b&gt;Database Migration:&lt;/b&gt; Migrating databases across different platforms or cloud environments.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Database Scaling:&lt;/b&gt; Vertical and horizontal scaling techniques in cloud environments.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;3. SRE Principles (Site Reliability Engineering)&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;b&gt;Incident Management:&lt;/b&gt; Handling database outages, incident response, and on-call rotations.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Monitoring and Alerting:&lt;/b&gt; Tools like Prometheus, Grafana, Datadog, CloudWatch.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Service Level Objectives (SLOs) / Service Level Agreements (SLAs):&lt;/b&gt; Ensuring uptime and performance targets.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Disaster Recovery Planning:&lt;/b&gt; Ensuring high availability (HA) and disaster recovery (DR) solutions.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;4. Scripting and Automation&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;b&gt;Scripting Languages:&lt;/b&gt; Python, Shell scripting, Bash, PowerShell.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Automation Tools:&lt;/b&gt; Ansible, Puppet, Chef.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Infrastructure Automation:&lt;/b&gt; Automating database deployment, patching, and scaling.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;5. 
Networking and Infrastructure&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;&lt;b&gt;Networking Basics:&lt;/b&gt; TCP/IP, DNS, Firewall, Load Balancers.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Database Connectivity:&lt;/b&gt; Connection pooling, failover strategies, and multi-region deployment.&lt;/li&gt; &lt;li&gt;&lt;b&gt;Storage and Disk Management:&lt;/b&gt; Understanding IOPS, latency, and throughput.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;p&gt;&lt;b&gt;6. OS Skills &lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Expertise in Linux OS ( RHEL, UBunto, Centos)&lt;/b&gt;&lt;/p&gt;&lt;/li&gt; &lt;li&gt;Understanding of file systems (ext4, XFS, etc.), permissions, and ownership (chmod, chown, ACLs).&lt;/li&gt; &lt;li&gt;Knowledge of process monitoring, management, and troubleshooting (ps, top, htop, kill, pkill, etc.).&lt;/li&gt; &lt;li&gt;Proficiency with tools like&amp;nbsp;top,&amp;nbsp;htop,&amp;nbsp;vmstat,&amp;nbsp;iostat,&amp;nbsp;sar, and&amp;nbsp;dstat&amp;nbsp;to monitor CPU, memory, disk I/O, and network usage.&lt;/li&gt; &lt;li&gt;Ability to analyze system logs (/var/log/,&amp;nbsp;journalctl,&amp;nbsp;dmesg) for troubleshooting.&lt;/li&gt; &lt;li&gt;Understanding of resource limits (CPU, memory, disk, network) and how they impact database performance.&lt;/li&gt; &lt;li&gt;Knowledge of partitioning tools (fdisk,&amp;nbsp;parted) and file system management (mkfs,&amp;nbsp;mount,&amp;nbsp;umount).&lt;/li&gt; &lt;li&gt;Understanding of RAID configurations and Logical Volume Management (LVM) for storage scalability.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;About Rackspace Technology&lt;/b&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;We are the multicloud solutions experts. We combine our expertise with the worlds leading technologies  across applications, data and security  to deliver end-to-end solutions. 
We have a proven record of advising customers based on their business challenges, designing solutions that scale, building and managing those solutions, and optimizing returns into the future. Named a best place to work, year after year according to Fortune, Forbes and Glassdoor, we attract and develop world-class talent. Join us on our mission to embrace technology, empower customers and deliver the future.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;More on Rackspace Technology&lt;/b&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Though were all different, Rackers thrive through our connection to a central goal: to be a valued member of a winning team on an inspiring mission. We bring our whole selves to work every day. And we embrace the notion that unique perspectives fuel innovation and enable us to best serve our customers and communities around the globe. We welcome you to apply today and want you to know that we are committed to offering equal employment opportunity without regard to age, color, disability, gender reassignment or identity or expression, genetic information, marital or civil partner status, pregnancy or maternity status, military or veteran status, nationality, ethnic or national origin, race, religion or belief, sexual orientation, or any legally protected characteristic. 
If you have a disability or special need that requires accommodation, please let us know.&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Oil &amp;amp; Gas&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Rackspace Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817792/staff-database-reliability-engineer-in-oracle-cloud-at-rackspace-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817792/staff-database-reliability-engineer-in-oracle-cloud-at-rackspace-technology/</link>
  <title>[Full Time] Staff Database Reliability Engineer - IN (Oracle + Cloud) at Rackspace Technology</title>
  <dc:date>2026-04-01T05:37:11+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817761/oracle-ebs-bi-apps-obia-qe-testing-functional-technical-at-rackspace-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;b&gt;Oracle EBS/OBIA Quality Engineer (QE)&amp;nbsp;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Experience Required:&lt;br&gt;8+ years of experience in Quality Engineering/Testing&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Job Description:&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Quality Engineer responsible for end-to-end testing of Oracle E-Business Suite (EBS) and Oracle Business Intelligence Application (OBIA) implementations and customizations, focusing on Financial &amp;amp; Procurement modules with strong SQL/PL-SQL skills.&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Candidates must have strong expertise in &lt;/b&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;SQL and PL-SQL Knowledge&lt;/li&gt; &lt;li&gt;Oracle E-Business Suite (EBS) - Financial &amp;amp; Procurement modules&lt;/li&gt; &lt;li&gt;Oracle Data Integrator (ODI) 11G &amp;amp; 12C&lt;/li&gt; &lt;li&gt;Oracle Business Intelligence Application (OBIA)&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Good to Know:&lt;/b&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Oracle Business Intelligence Enterprise Edition (OBIEE) 12c&lt;/li&gt; &lt;li&gt;Oracle GoldenGate (OGG) Data Replication Process&lt;/li&gt; &lt;li&gt;Oracle Communications Data Model (OCDM)&lt;/li&gt; &lt;li&gt;Oracle Billing and Revenue Management (BRM)&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Must Know Skills&lt;/b&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;b&gt;Strong SQL and PL/SQL&lt;/b&gt; skills for data validation, query verification, and test data preparation&lt;/li&gt; &lt;li&gt;Detailed experience in Oracle E-Business Suite (EBS) application functional and testing knowledge on &lt;b&gt;Financial&lt;/b&gt; modules and &lt;b&gt;Procurement&lt;/b&gt; modules.&lt;/li&gt; &lt;li&gt;Experience in EBS-to-OBIA data integration testing and reconciliation&lt;/li&gt; &lt;li&gt;Strong understanding of data warehouse concepts (star schema, slowly 
changing dimensions, fact tables)&lt;/li&gt; &lt;li&gt;Knowledge of EBS tables, data models, and business processes for Financial and Procurement flows&lt;/li&gt; &lt;li&gt;Test financial period close processes and reconciliation report&lt;/li&gt; &lt;li&gt;Good Understanding on OBIA ETL jobs, SQL analysis&lt;/li&gt; &lt;li&gt;Conduct root cause analysis for critical defects to Identify performance bottlenecks and work with development team on optimization&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;Create detailed test cases covering functional, integration, regression, and performance testing scenarios&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;Ability to work closely with on-site/offshore development teams and business analysts&lt;/li&gt; &lt;li&gt;Ability to read and interpret technical specifications and design documents&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Good to have&lt;/b&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Experience in Oracle Business Intelligence Enterprise Edition (OBIEE) to Test complex dashboard functionality, prompts, agents, and conditional formatting reports along with Session logs extraction and SQL Analysis to identify the data issues.&lt;/li&gt; &lt;li&gt;Develop comprehensive test plans and test strategies for OBIEE/ODI/OBIA implementations&lt;/li&gt; &lt;li&gt;Performance testing experience for OBIEE/ODI applications&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;About Rackspace Technology&lt;/b&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;We are the multicloud solutions experts. We combine our expertise with the world's leading technologies — across applications, data and security — to deliver end-to-end solutions. We have a proven record of advising customers based on their business challenges, designing solutions that scale, building and managing those solutions, and optimizing returns into the future. 
Named a best place to work, year after year according to Fortune, Forbes and Glassdoor, we attract and develop world-class talent. Join us on our mission to embrace technology, empower customers and deliver the future.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;More on Rackspace Technology&lt;/b&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Though we're all different, Rackers thrive through our connection to a central goal: to be a valued member of a winning team on an inspiring mission. We bring our whole selves to work every day. And we embrace the notion that unique perspectives fuel innovation and enable us to best serve our customers and communities around the globe. We welcome you to apply today and want you to know that we are committed to offering equal employment opportunity without regard to age, color, disability, gender reassignment or identity or expression, genetic information, marital or civil partner status, pregnancy or maternity status, military or veteran status, nationality, ethnic or national origin, race, religion or belief, sexual orientation, or any legally protected characteristic. 
If you have a disability or special need that requires accommodation, please let us know.&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Oil &amp;amp; Gas&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Rackspace Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817761/oracle-ebs-bi-apps-obia-qe-testing-functional-technical-at-rackspace-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817761/oracle-ebs-bi-apps-obia-qe-testing-functional-technical-at-rackspace-technology/</link>
  <title>[Full Time] Oracle Ebs / Bi Apps (obia) Qe Testing (functional &amp; Technical) at Rackspace Technology</title>
  <dc:date>Tue, 31 Mar 2026 23:02:26 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819374/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Fabric&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 7.5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;1.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;2.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;3.Experience in SaaS and PaaS Offerings of Fabric &lt;br&gt;4.Worked in implementation of Gen AI/Agentic AI use case in Fabric &lt;br&gt;5.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;6.Should have worked in Data governance Solution &lt;br&gt;7.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;8.Lead Big data architecture and design medium-big Cloud based, Big Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;9.Breadth of experience in various client scenarios and situations &lt;br&gt;10.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;11.Thought leadership and innovation &lt;br&gt;12.Lead creation of new data assets offerings &lt;br&gt;13.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;1.Strong Experience working in Fabric Lakehouse and Warehouse working on Shortcuts and Copilots. &lt;li&gt;&lt;br&gt;2.Good to have exposure or experience in Data science and Real time analytics . &lt;br&gt;3.Strong experience in Azure is preferred with hands-on experience in two or more of these skills :Azure Fabric, Azure Synapse Analytics, Azure Databricks with PySpark / Scala / SparkSQL, Purview / Unity catalog . &lt;br&gt;4.Exp in one or more Real-time/Streaming technologies including:Azure Stream Analytics, Azure Data Explorer, Azure Time Series Insights, etc. &lt;br&gt;5.Candidate should have deep knowledge in Python, LLM Open AI Foundry &lt;br&gt;6.Github Copilot experience is an added advantage &lt;br&gt;7.Exp in handling medium to large Big Data implementations &lt;br&gt;8.Candidate must have 10-12 years of IT experience and around 5 years of extensive Big data experience (design + build) &lt;br&gt;9.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;1.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;2.Should have excellent client communication skills &lt;br&gt;3.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;1.Must have:BE/BTech/MCA &lt;br&gt;2.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - 
Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819374/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819374/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819238/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Fabric&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different teams to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;1.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;2.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;3.Experience in SaaS and PaaS Offerings of Fabric &lt;br&gt;4.Worked in implementation of Gen AI/Agentic AI use case in Fabric &lt;br&gt;5.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;6.Should have worked in Data governance Solution &lt;br&gt;7.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;8.Lead Big data architecture and design medium-big Cloud based, Big Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;9.Breadth of experience in various client scenarios and situations &lt;br&gt;10.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;11.Thought leadership and innovation &lt;br&gt;12.Lead creation of new data assets offerings &lt;br&gt;13.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;1.Strong Experience working in Fabric Lakehouse and Warehouse working on Shortcuts and Copilots. &lt;li&gt;&lt;br&gt;2.Good to have exposure or experience in Data science and Real time analytics . &lt;br&gt;3.Strong experience in Azure is preferred with hands-on experience in two or more of these skills :Azure Fabric, Azure Synapse Analytics, Azure Databricks with PySpark / Scala / SparkSQL, Purview / Unity catalog . &lt;br&gt;4.Exp in one or more Real-time/Streaming technologies including:Azure Stream Analytics, Azure Data Explorer, Azure Time Series Insights, etc. &lt;br&gt;5.Candidate should have deep knowledge in Python, LLM Open AI Foundry &lt;br&gt;6.Github Copilot experience is an added advantage 7.Exp in handling medium to large Big Data implementations &lt;br&gt;8.Candidate must have 7-10 years of IT experience and around 5 years of Big data experience (design + build) &lt;br&gt;9.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;1.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;2.Should have excellent client communication skills &lt;br&gt;3.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;1.Must have:BE/BTech/MCA &lt;br&gt;2.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; 
QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819238/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819238/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819216/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Fabric&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 7.5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;1.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;2.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;3.Experience in SaaS and PaaS Offerings of Fabric &lt;br&gt;4.Worked in implementation of Gen AI/Agentic AI use case in Fabric &lt;br&gt;5.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;6.Should have worked in Data governance Solution &lt;br&gt;7.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;8.Lead Big data architecture and design medium-big Cloud based, Big Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;9.Breadth of experience in various client scenarios and situations &lt;br&gt;10.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;11.Thought leadership and innovation &lt;br&gt;12.Lead creation of new data assets offerings &lt;br&gt;13.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;1.Strong Experience working in Fabric Lakehouse and Warehouse working on Shortcuts and Copilots. &lt;li&gt;&lt;br&gt;2.Good to have exposure or experience in Data science and Real time analytics . &lt;br&gt;3.Strong experience in Azure is preferred with hands-on experience in two or more of these skills :Azure Fabric, Azure Synapse Analytics, Azure Databricks with PySpark / Scala / SparkSQL, Purview / Unity catalog . &lt;br&gt;4.Exp in one or more Real-time/Streaming technologies including:Azure Stream Analytics, Azure Data Explorer, Azure Time Series Insights, etc. &lt;br&gt;5.Candidate should have deep knowledge in Python, LLM Open AI Foundry &lt;br&gt;6.Github Copilot experience is an added advantage &lt;br&gt;7.Exp in handling medium to large Big Data implementations &lt;br&gt;8.Candidate must have 10-12 years of IT experience and around 5 years of extensive Big data experience (design + build) &lt;br&gt;9.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;1.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;2.Should have excellent client communication skills &lt;br&gt;3.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;1.Must have:BE/BTech/MCA &lt;br&gt;2.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - 
Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819216/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819216/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819299/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;B.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;C.Worked in implementation of Databricks Gen AI/Agentic AI use case &lt;br&gt;D.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;E.Candidate should have worked in Data governance Solution &lt;br&gt;F.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;G.Lead Big data architecture and design medium-big Cloud based, Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;H.Breadth of experience in various client scenarios and situations &lt;br&gt;I.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;J.Thought leadership and innovation &lt;br&gt;K.Lead creation of new data assets offerings &lt;br&gt;L.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;A.Experience working in Medallion architecture involving Delta lake house principles &lt;br&gt;B.Expert level in Designing and Architect solutions in Azure Databricks, Azure Data lake, Delta Lake implementation. &lt;br&gt;C.Experience in Databricks GenAI Implementation &lt;br&gt;D.Experience in Azure purview/Profisee/Unity Catalog &lt;br&gt;E.Well versed in Real time and batch streaming concepts and experience in its implementation &lt;br&gt;F. Expert level experience in Azure cloud technologies like PySpark, Databricks, Python, Scala and SQL. &lt;br&gt;G.Exp in one or more Real-time/batch ingestion including:Azure Delta live tables , Autoloader &lt;br&gt;H.Exp in handling medium to large Big Data implementations &lt;br&gt;I. Strong understanding of data strategy. 
Data Quality and Delta lake components &lt;br&gt;J.For Level 8 - Candidate must have 10-12 years of IT experience and around 5 years of extensive Big data experience (design + build) in Databricks &lt;br&gt;K.For Level 9 - Candidate must have 7-10 years of IT experience and around 5 years of Big data experience (design + build) in Databricks &lt;br&gt;L.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;A.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;B.Should have excellent client communication skills &lt;br&gt;C.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;A.Must have:BE/BTech/MCA &lt;br&gt;B.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819299/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819299/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818249/sap-hana-professional-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Experience; 6- 8 Years &lt;/strong&gt; &lt;br&gt; &lt;strong&gt; Location ; &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;strong&gt; Hyderabad &lt;/strong&gt; &amp;nbsp; &lt;span&gt; &amp;nbsp; &lt;span&gt; &amp;nbsp; &lt;/span&gt; &amp;nbsp; &lt;/span&gt; Role Description &lt;br&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Senior developer with 6-8 years experience SAP &lt;br&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Enterprise HANA as primary skill and BW4 -based data warehousing/reporting solutions, including end-to-end modeling, data acquisition, performance optimization, and BEx (Query &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Designer/Analyzer) reporting support/enhancements. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Partner with business and technical teams to translate requirements into scalable analytics products. &lt;span&gt; &amp;nbsp; &lt;span&gt; &amp;nbsp; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818249/sap-hana-professional-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818249/sap-hana-professional-at-cirruslabs/</link>
  <title>[Full Time] SAP HANA Professional at Cirruslabs</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819242/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 7.5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different teams to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;B.Discuss specific data architecture and data related issues with client architect/team (in area of expertise) &lt;br&gt;C.Experienced in Gen AI/Agentic AI use case Implementation in medium to high scale project &lt;br&gt;D.Candidate should have worked in Data governance Solution &lt;br&gt;E.Knowledge in LLM and Prompt engineering &lt;br&gt;F.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;G.Lead the data architecture and design of complex, enterprise-level applications and systems &lt;br&gt;H.Breadth of experience in various client scenarios and situations &lt;br&gt;I.Experienced in Data Architecture-based sales and 
delivery &lt;br&gt;J.Thought leadership and innovation &lt;br&gt;K.Lead creation of new data assets offerings &lt;br&gt;L.Experience in handling OLTP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;A.Strong experience in Azure is preferred with hands-on experience in 2 or more of these skills :Azure SQL DB ,Azure SQL Managed Instance ,Azure Data Lake Store, Azure Cosmos DB, Azure Database for PostgreSQL, Azure Database for MySQL &lt;br&gt;B.Experience in handling medium to large data migration projects &lt;br&gt;C.Candidate should have deep knowledge in Python and Large Language Model &lt;br&gt;D.Github Copilot experience is an added advantage &lt;br&gt;E.Experience in Azure purview/Profisee/Unity Catalog &lt;br&gt;F.Candidate must have around 5 years of extensive database experience (design + build) &lt;br&gt;J.Candidate must have 10-12 years of IT experience and around 5 years of extensive database experience (design + build)Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;A.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;B.Should have excellent client communication skills &lt;br&gt;C.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification&lt;br&gt;A.Must have:BE/BTech/MCA &lt;br&gt;B.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819242/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819242/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 31 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819317/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;B.Discuss specific data architecture and data related issues with client architect/team (in area of expertise) &lt;br&gt;C.Experienced in Gen AI/Agentic AI use case Implementation in medium to high scale project &lt;br&gt;D.Candidate should have worked in Data governance Solution &lt;br&gt;E.Knowledge in LLM and Prompt engineering &lt;br&gt;F.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;G.Lead the data architecture and design of complex, enterprise-level applications and systems &lt;br&gt;H.Breadth of experience in various client scenarios and situations &lt;br&gt;I.Experienced in Data Architecture-based sales 
and delivery &lt;br&gt;J.Thought leadership and innovation &lt;br&gt;K.Lead creation of new data assets offerings &lt;br&gt;L.Experience in handling OLTP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;A.Strong experience in Azure is preferred with hands-on experience in 2 or more of these skills :Azure SQL DB ,Azure SQL Managed Instance ,Azure Data Lake Store, Azure Cosmos DB, Azure Database for PostgreSQL, Azure Database for MySQL &lt;br&gt;B.Experience in handling medium to large data migration projects &lt;br&gt;C.Candidate should have deep knowledge in Python and Large Language Model &lt;br&gt;D.Github Copilot experience is an added advantage &lt;br&gt;E.Experience in Azure purview/Profisee/Unity Catalog &lt;br&gt;F.Candidate must have around 5 years of extensive database experience (design + build) &lt;br&gt;I.Candidate must have 7-10 years of IT experience and around 5 years of database experience (design + build) &lt;br&gt;J.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;A.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;B.Should have excellent client communication skills &lt;br&gt;C.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification&lt;br&gt;A.Must have:BE/BTech/MCA &lt;br&gt;B.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819317/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819317/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-31T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819373/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum 7.5 year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, a typical day involves defining the data requirements and designing the structure necessary for the application. This role includes modeling the data architecture, planning how data will be stored efficiently, and ensuring seamless integration across various components. The position requires a thoughtful approach to organizing data to support application functionality and scalability, collaborating with different stakeholders to align data strategies with project goals, and continuously refining data models to meet evolving needs.Key Responsibilities&lt;br&gt;A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal &lt;br&gt;B.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise) &lt;br&gt;C.Worked in implementation of Databricks Gen AI/Agentic AI use case &lt;br&gt;D.Knowledge in LLM and Prompt engineering, AI foundry &lt;br&gt;E.Candidate should have worked in Data governance Solution &lt;br&gt;F.Analyze and assess the impact of the requirements on the data and its lifecycle &lt;br&gt;G.Lead Big data architecture and design medium-big Cloud based, Data and Analytical Solutions using Lambda architecture. 
&lt;br&gt;H.Breadth of experience in various client scenarios and situations &lt;br&gt;I.Experienced in Big Data Architecture-based sales and delivery &lt;br&gt;J.Thought leadership and innovation &lt;br&gt;K.Lead creation of new data assets offerings &lt;br&gt;L.Experience in handling OLTP and OLAP data workloads &lt;br&gt; Technical Experience:&lt;br&gt;A.Experience working in Medallion architecture involving Delta lake house principles &lt;br&gt;B.Expert level in Designing and Architect solutions in Azure Databricks, Azure Data lake, Delta Lake implementation. &lt;br&gt;C.Experience in Databricks GenAI Implementation &lt;br&gt;D.Experience in Azure purview/Profisee/Unity Catalog &lt;br&gt;E.Well versed in Real time and batch streaming concepts and experience in its implementation &lt;br&gt;F. Expert level experience in Azure cloud technologies like PySpark, Databricks, Python, Scala and SQL. &lt;br&gt;G.Exp in one or more Real-time/batch ingestion including:Azure Delta live tables , Autoloader &lt;br&gt;H.Exp in handling medium to large Big Data implementations &lt;br&gt;I. Strong understanding of data strategy. 
Data Quality and Delta lake components &lt;br&gt;J.Candidate must have 10-12 years of IT experience and around 5 years of extensive Big data experience (design + build) in Databricks &lt;br&gt;K.Architect for a medium sized client delivery project Professional Experience:&lt;br&gt;A.Should be able to drive the technology design meetings, propose technology design and architecture &lt;br&gt;B.Should have excellent client communication skills &lt;br&gt;C.Should have good analytical and problem-solving skills &lt;br&gt; Educational Qualification:&lt;br&gt;A.Must have:BE/BTech/MCA &lt;br&gt;B.Good to have:ME/MTech &lt;br&gt;&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819373/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819373/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-31T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817912/tanzu-data-intelligence-mts3-rabbitmq-professional-at-vmware/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; Please Note: &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; 1. If you are a first time user, please create your candidate login account before you apply for a job. (Click Sign In &amp;gt; Create Account) &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; 2. If you already have a Candidate Account, please Sign-In before you apply. &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; Job Description: &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Area: RabbitMQ Core &lt;/b&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;u&gt; Functions &lt;/u&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Design and implement features for RabbitMQ and contribute to existing RabbitMQ features like Classic Queues, Quorum Queues, Streams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop robust and scalable producer/consumer apps in Erlang / Java / Go &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Configure and manage RabbitMQ clusters, federation, and shovels. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Optimize message throughput, latency, queue performance, and reliability. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Implement HA using RabbitMQ clustering, queue mirroring, and quorum queues. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ensure developed code meets security, compliance, and operational standards. 
&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;u&gt; Technical Skills &lt;/u&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Experience with developing and using messaging products. &lt;/li&gt; &lt;li&gt; Hands-on experience writing producer/consumer code in at least one modern language (Erlang, Java, Go, Python, &lt;u&gt; Node.js &lt;/u&gt; ). &lt;/li&gt; &lt;li&gt; High understanding of Queue / Stream products like Kafka. &lt;/li&gt; &lt;li&gt; High understanding of the following concepts: &lt;/li&gt; &lt;li&gt; Exchanges (direct, topic, fanout, headers) &lt;/li&gt; &lt;li&gt; Queues (classic, quorum, stream) &lt;/li&gt; &lt;li&gt; Bindings &amp;amp; routing patterns &lt;/li&gt; &lt;li&gt; Acknowledgements, prefetch, delivery semantics &lt;/li&gt; &lt;li&gt; Strong debugging and log analysis skills. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; . &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;VMware&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kalyani&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817912/tanzu-data-intelligence-mts3-rabbitmq-professional-at-vmware/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817912/tanzu-data-intelligence-mts3-rabbitmq-professional-at-vmware/</link>
  <title>[Full Time] Tanzu Data Intelligence Mts3 Rabbitmq Professional at VMware</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817719/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;AI &amp;amp; Data Solution Architecture&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly across systems, while also addressing any challenges that arise in the data architecture process. Your role will be pivotal in shaping the data landscape of the organization, enabling effective data management and utilization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Data &amp;amp; AI Solution Architecture.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with cloud-based data solutions and architectures.&lt;/li&gt;&lt;li&gt;Familiarity with data integration tools and methodologies.&lt;/li&gt;&lt;li&gt;Ability to design scalable and efficient data storage solutions. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Data &amp;amp; AI Solution Architecture.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817719/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817719/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817914/oracle-dbaexadata-professional-at-capgemini/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Your Role&lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;li&gt;Design, implement, and maintain Oracle database environments to ensure high availability, security, performance, and reliability for missioncritical applications. &lt;/li&gt;&lt;li&gt;Perform advanced Oracle administration including ASM, RAC, Data Guard, RMAN backups, performance tuning, and troubleshooting across onpremises and cloud platforms (OCI, AWS RDS). &lt;/li&gt;&lt;li&gt;Manage endtoend database operations using SQL, PL/SQL, OEM, SQL Developer, and ASMCMD while ensuring optimal performance and adherence to best practices. &lt;/li&gt;&lt;li&gt;Support system infrastructure with strong Linux/Unix and Windows administration skills and collaborate with crossfunctional teams for smooth operations. &lt;/li&gt;&lt;li&gt;Implement backup, recovery, monitoring, and security strategies while ensuring compliance, documentation, and continuous improvement of database environments. &lt;/li&gt;&lt;b&gt;Your Profile&lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;li&gt;6+ years of handson expertise in designing, implementing, and maintaining highavailability Oracle database environments for missioncritical applications. &lt;/li&gt;&lt;li&gt;Skilled in Oracle Architecture, ASM, RAC, and Data Guard, with strong command over SQL, PL/SQL, RMAN, OEM, SQL Developer, and ASMCMD for efficient database administration and optimization. &lt;/li&gt;&lt;li&gt;Expert in performance tuning, backup/recovery strategies, and troubleshooting, ensuring secure, scalable, and highperforming database operations across onpremises and cloud platforms. &lt;/li&gt;&lt;li&gt;Proficient in Linux/Unix and Windows administration, with solid understanding of OCI, AWS RDS, and modern cloud database concepts. 
&lt;/li&gt;&lt;li&gt;Committed to operational excellence, leveraging certifications like OCP, OCI Foundations/Architect, and Azure/GCP database credentials to deliver robust and reliable database solutions. &lt;/li&gt;&lt;b&gt;What Will You Love Working At Capgemini&lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;li&gt;Exposure to large, enterprisescale database environments, giving you handson experience with Oracle, Exadata, RAC, Data Guard, and missioncritical highavailability systems. &lt;/li&gt;&lt;li&gt;Diverse and global project opportunities, especially collaboration with clients, improving your technical, communication, and stakeholdermanagement skills. &lt;/li&gt;&lt;li&gt;Strong learning and certification culture, supporting growth in Oracle OCP, OCI Architect, Azure, and other cloud/database certifications, collaborative and peoplefocused work environment, valuing knowledge sharing, worklife balance, and continuous professional development. &lt;/li&gt;&lt;br&gt; &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capgemini&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817914/oracle-dbaexadata-professional-at-capgemini/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817914/oracle-dbaexadata-professional-at-capgemini/</link>
  <title>[Full Time] Oracle DBA+Exadata Professional at Capgemini</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817952/l1-production-support-engineer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;ITT (Investment Trading Technology) team supports TM (Treasury and Markets) and is looking for the right individual to support Murex 3.1 applications. Murex is one of the leading applications in the areas of global trading, risk management, and trade processing, and we are using it across asset classes, Front to Risk to Back in a setup with multiple production environments and grids, connections to many upstream and downstream systems. &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Resolve production incidents in a smooth and timely manner with minimal impact to business. &lt;/li&gt;&lt;li&gt;Put in controls and checks in place to monitor and smooth the functioning of production systems. &lt;/li&gt;&lt;li&gt;Investigate and understand the cause of defects raised. &lt;/li&gt;&lt;li&gt;Stakeholder relationships across all groups within the Bank, ensuring alignment with common objectives and fulfillment of criteria and requirements across the various businesses and units. &lt;/li&gt;&lt;li&gt;Member of the development/ implementation team (including offshore) working on assigned tasks, projects, and it initiatives. &lt;/li&gt;&lt;li&gt;Interacts with members and other functional teams, including members of the offshore team. &lt;/li&gt;&lt;li&gt;Work with team members participating in shadowing and training activities. &lt;/li&gt;&lt;li&gt;Adhere to information controls and security frameworks/ procedures, including the change and incident management process. &lt;/li&gt;&lt;li&gt;Provide general administrative support to the team, including meetings, knowledge transfer, and activity reporting. &lt;/li&gt;&lt;li&gt;You are expected to work in extended hours (shifts weekends) on a need basis. 
&lt;/li&gt;&lt;li&gt;Demonstrate a functional acumen to support how solutions will address client goals while maintaining alignment with industry best practices. &lt;/li&gt;&lt;li&gt;Day-to-day delivery and support of key treasury systems/initiatives. &lt;/li&gt;&lt;li&gt;Work efficiently and prioritize system enhancement/upgrades. &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Must have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Overall 2 to 4 years of experience in Banking application L1 production support. &lt;/li&gt;&lt;li&gt;First-hand experience with Unix and SQL. &lt;/li&gt;&lt;li&gt;Exposure to production monitoring. &lt;/li&gt;&lt;li&gt;Superior technical, analytical, and communication capabilities. &lt;/li&gt;&lt;li&gt;Current on new technology, standards, protocols, and tools in areas relevant to the rapidly changing digital environment. &lt;/li&gt;&lt;li&gt;Well-versed in digital technologies, automation, environment support, and key technology/vendor solutions. &lt;/li&gt;&lt;li&gt;Exposure to capital market experience. &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Nice to have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Good verbal written communication skills. &lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817952/l1-production-support-engineer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817952/l1-production-support-engineer-at-luxoft/</link>
  <title>[Full Time] L1 Production Support Engineer at Luxoft</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817702/data-architect-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Role &amp;amp; responsibilities&lt;/strong&gt; &lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Core Data Modeling&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;Strong expertise in conceptual, logical, physical modeling &lt;/li&gt;&lt;li&gt;Proficiency with modeling tools such as:&amp;nbsp; &lt;/li&gt;&lt;li&gt;ER/Studio, Erwin, Lucidchart, PowerDesigner, or Microsoft Fabric data modeling tools &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Deep understanding of:&amp;nbsp;&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;3NF, star/snowflake schemas, Data Vault 2.0, and denormalized lakehouse models &lt;/li&gt;&lt;li&gt;Surrogate keys, hierarchies, reference/master data structures &lt;/li&gt;&lt;li&gt;Slowly Changing Dimensions (SCD Types 1/2/3) &lt;/li&gt;&lt;li&gt;Fact/Dimension modeling and grain definitions &lt;/li&gt;&lt;li&gt;Modern Data Platform Experience &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience modeling in:&amp;nbsp;&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;Lakehouse architectures (Databricks Delta Lake, Fabric Lakehouse) &lt;/li&gt;&lt;li&gt;Cloud data warehouses (Synapse Dedicated, Snowflake, BigQuery) &lt;/li&gt;&lt;li&gt;Azure SQL/MI for operational/reporting stores &lt;/li&gt;&lt;li&gt;Understanding of:&amp;nbsp; &lt;/li&gt;&lt;li&gt;Schema evolution &lt;/li&gt;&lt;li&gt;Columnar storage formats (Delta, Parquet) &lt;/li&gt;&lt;li&gt;Distributed processing constraints (Spark engines) &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Governance &amp;amp; Quality&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience with data dictionaries, glossaries, lineage, and governance tools (Microsoft Purview, Collibra).&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Familiarity with metadata management, PII classification, RBAC, and regulatory data handling.&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Soft Skills&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Strong communication, onboarding, and ability to simplify 
complexity.&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Proficient in running cross-functional workshops with business and engineering teams.&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Highly organized and&lt;/strong&gt; &lt;strong&gt;documentation-driven.&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;Experience with &lt;strong&gt;Microsoft Purview&lt;/strong&gt; for governance and lineage&amp;nbsp; &lt;/li&gt;&lt;li&gt;Knowledge of &lt;strong&gt;Synapse serverless SQL pools&lt;/strong&gt;, KQL, Timeseries analytics&amp;nbsp; &lt;/li&gt;&lt;li&gt;Expertise in &lt;strong&gt;MLOps&lt;/strong&gt;: Databricks Feature Store, Model Registry, Fabric ML workloads&amp;nbsp; &lt;/li&gt;&lt;li&gt;Background in &lt;strong&gt;Data Science&lt;/strong&gt; (Python, ML frameworks)&amp;nbsp; &lt;/li&gt;&lt;li&gt;Experience designing solutions for:&amp;nbsp; &lt;/li&gt;&lt;li&gt;Customer 360 platforms &lt;/li&gt;&lt;li&gt;Supply chain analytics &lt;/li&gt;&lt;li&gt;Digital media analytics &lt;/li&gt;&lt;li&gt;Experience with &lt;strong&gt;cost governance&lt;/strong&gt; and FinOps&amp;nbsp; &lt;/li&gt;&lt;li&gt;Certifications (nice to have):&amp;nbsp; &lt;/li&gt;&lt;li&gt;Databricks Certified Data Engineer/Architect &lt;/li&gt;&lt;li&gt;Microsoft Fabric Analytics Engineer &lt;/li&gt;&lt;li&gt;Azure Solutions Architect Expert &lt;/li&gt;&lt;li&gt;DP-203 (Azure Data Engineer) &lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Preferred candidate profile&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;Bengaluru, Chennai, Kolkata, Kochi, Mumbai and Pune.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact 
Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kochi&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817702/data-architect-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817702/data-architect-at-tata-consultancy/</link>
  <title>[Full Time] Data Architect at Tata Consultancy</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817741/data-platform-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Platform Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Architects the data platform blueprint and implements the design, encompassing the relevant data platform components. Collaborates with the Integration Architects and Data Architects to ensure cohesive integration between systems and data models. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Snowflake Data Warehouse&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with various teams to gather requirements, developing application features, and ensuring that the applications align with business objectives. 
You will also engage in problem-solving discussions and contribute to the overall application strategy, ensuring that the solutions provided are effective and efficient.&lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;- Expected to be an SME.&lt;br&gt;- Collaborate and manage the team to perform.&lt;br&gt;- Responsible for team decisions.&lt;br&gt;- Engage with multiple teams and contribute on key decisions.&lt;br&gt;- Expected to provide solutions to problems that apply across multiple teams.&lt;br&gt;- Facilitate knowledge sharing sessions to enhance team capabilities.&lt;br&gt;- Monitor project progress and ensure alignment with business goals.&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;- Proficiency in Snowflake Data Warehouse.&lt;br&gt;- Strong understanding of data modeling and ETL processes.&lt;br&gt;- Experience with SQL and data querying techniques.&lt;br&gt;- Familiarity with cloud-based data solutions and architecture.&lt;br&gt;- Ability to troubleshoot and optimize data workflows.&lt;br&gt; &lt;b&gt;Additional Information:&lt;/b&gt;&lt;br&gt;- The candidate should have minimum 12 years of experience in Snowflake Data Warehouse.&lt;br&gt;- This position is based at our Bengaluru office.&lt;br&gt;- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;&lt;br&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817741/data-platform-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817741/data-platform-architect-at-accenture/</link>
  <title>[Full Time] Data Platform Architect at Accenture</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818114/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Lead, you will lead the effort to design, build, and configure applications, acting as the primary point of contact. Your typical day will involve collaborating with various teams to ensure that application development aligns with business objectives, overseeing project timelines, and facilitating communication among stakeholders to drive successful project outcomes. You will also engage in problem-solving activities, providing guidance and support to your team members while ensuring adherence to best practices in application development. Roles &amp;amp; Responsibilities&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate training and development opportunities for team members to enhance their skills.&lt;/li&gt;&lt;li&gt;Monitor project progress and implement necessary adjustments to meet deadlines. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Microsoft Azure Databricks.&lt;/li&gt;&lt;li&gt;Strong understanding of cloud computing concepts and architecture.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with data analytics and visualization tools.&lt;/li&gt;&lt;li&gt;Knowledge of programming languages such as Python or Scala. &lt;br&gt;Additional Information&lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Microsoft Azure Databricks.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818114/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818114/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818348/tanzu-data-intelligence-rabbitmq-professional-at-vmware/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; Please Note: &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; 1. If you are a first time user, please create your candidate login account before you apply for a job. (Click Sign In &amp;gt; Create Account) &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; 2. If you already have a Candidate Account, please Sign-In before you apply. &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; Job Description: &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; Area: RabbitMQ Core &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;u&gt; Functions &lt;/u&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Design and implement features for RabbitMQ and contribute to existing RabbitMQ features like Classic Queues, Quorum Queues, Streams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Develop robust and scalable producer/consumer apps in Erlang / Java / Go &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Configure and manage RabbitMQ clusters, federation, and shovels. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Optimize message throughput, latency, queue performance, and reliability. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Implement HA using RabbitMQ clustering, queue mirroring, and quorum queues. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ensure developed code meets security, compliance, and operational standards &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;u&gt; Technical Skills &lt;/u&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong development experience with messaging products. &lt;/li&gt; &lt;li&gt; Hands-on experience writing producer/consumer code in at least one modern language (Erlang, Java, Go, Python, &lt;u&gt; Node.js &lt;/u&gt; ). &lt;/li&gt; &lt;li&gt; Deep understanding of Queue / Stream products like: Kafka, Tibco, MQ Series, etc. &lt;span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; Deep understanding of the following concepts: &lt;/li&gt; &lt;li&gt; Exchanges (direct, topic, fanout, headers) &lt;/li&gt; &lt;li&gt; Queues (classic, quorum, stream) &lt;/li&gt; &lt;li&gt; Bindings &amp;amp; routing patterns &lt;/li&gt; &lt;li&gt; Acknowledgements, prefetch, delivery semantics &lt;/li&gt; &lt;li&gt; Strong debugging and log analysis skills. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; . &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;VMware&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kalyani&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818348/tanzu-data-intelligence-rabbitmq-professional-at-vmware/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818348/tanzu-data-intelligence-rabbitmq-professional-at-vmware/</link>
  <title>[Full Time] Tanzu Data Intelligence RabbitMQ Professional at VMware</title>
  <dc:date>2026-03-30T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818001/oracle-iam-professional-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt; &amp;nbsp; &lt;span&gt; &lt;span&gt; &lt;br&gt; &lt;strong&gt; Experience - 3-8 years &lt;br&gt; Location - Hyderabad &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; Oracle Access Manager (OAM) 14c Architecture and deployment of OAM in containerized environments WebGate configuration and integration with OHS and protected applications &lt;br&gt; Authentication schemes, policies, and federation (SAML, OAuth, OIDC) REST API usage for automation and integration Troubleshooting access issues and performance tuning &lt;br&gt; o Oracle Identity Governance (OIG) 14c Design and implementation of user provisioning workflows &lt;br&gt; Role and policy management, certification campaigns &lt;br&gt; Connector development and integration with target systems &lt;br&gt; Customization using SOA composites and event handlers &lt;br&gt; Deployment and scaling of OIG on Kubernetes clusters o Oracle Unified Directory (OUD) 14c &lt;br&gt; Schema design, replication, and performance tuning &lt;br&gt; Integration with OAM/OIG as identity store &lt;br&gt; Containerized deployment and lifecycle management &lt;br&gt; Backup, restore, and monitoring in cloud-native environments o Oracle HTTP Server (OHS) 14c &lt;br&gt; SSL/TLS configuration and reverse proxy setup &lt;br&gt; Integration with WebGate and OAM &lt;br&gt; Containerization of OHS and orchestration in Kubernetes &lt;br&gt; Performance tuning and log analysis &lt;br&gt; Containerization &amp;amp; Kubernetes Skills o Kubernetes &lt;br&gt; Designing and deploying Oracle IAM stack on Kubernetes clusters &lt;br&gt; Helm chart customization for Oracle products &lt;br&gt; Managing StatefulSets, Deployments, Services, and Ingress for IAM workloads &lt;br&gt; Configuring persistent storage, secrets, and config maps &lt;br&gt; Monitoring and logging using Prometheus, Grafana, ELK, or similar tools &lt;br&gt; Autoscaling and high availability strategies o Docker &lt;br&gt; Building 
and maintaining Docker images for Oracle IAM components &lt;br&gt; Writing and optimizing Dockerfiles for OAM, OIG, OUD, and OHS &lt;br&gt; Managing container lifecycle, volumes, and networking &lt;br&gt; Security hardening of containers o CI/CD &amp;amp; DevOps Integration &lt;br&gt; Experience with GitLab CI, or GitHub Actions for automated deployments &lt;br&gt; Integration of IAM components into CI/CD pipelines &lt;br&gt; Infrastructure as Code (IaC) using Terraform or Ansible for Kubernetes resources &lt;br&gt; Cloud &amp;amp; Platform Experience o Experience deploying Oracle IAM on- &lt;br&gt; Oracle Cloud Infrastructure (OCI) &lt;br&gt; AWS, Azure, or GCP (preferred) o Familiarity with cloud-native IAM services and hybrid integration Required Qualifications: &lt;br&gt; Proven experience with Oracle IAM Suite components listed above &lt;br&gt; Strong understanding of IAM workflows, authentication protocols, and security best practices &lt;br&gt; Experience working in Agile/Scrum environments &lt;br&gt; Excellent communication and documentation skills Preferred Certifications: &lt;br&gt; Oracle Certified Expert- Identity and Access Management &lt;br&gt; Kubernetes Administrator (CKA) or Kubernetes Application Developer (CKAD) &lt;br&gt; OCI Architect Associate or Professional. &lt;/span&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818001/oracle-iam-professional-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818001/oracle-iam-professional-at-cirruslabs/</link>
  <title>[Full Time] Oracle IAM Professional at Cirruslabs</title>
  <dc:date>2026-03-27T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818304/oracle-dba-at-quinnox/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;1. Database Installation &amp;amp; Configuration&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Install, configure, and upgrade &lt;strong&gt;Oracle Database (11g/12c/19c)&lt;/strong&gt; across Linux and UNIX environments.&lt;/li&gt;&lt;li&gt;Create and manage databases using DBCA and manual methods.&lt;/li&gt;&lt;li&gt;Configure listener, TNS entries, and network connectivity.&lt;/li&gt;&lt;li&gt;Apply PSU/RU patches and perform version upgrades with minimal downtime.&lt;/li&gt;&lt;li&gt;Implement multitenant architecture (CDB/PDB) administration.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;2. Backup &amp;amp; Recovery Management&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Design and implement backup strategies using &lt;strong&gt;Oracle Recovery Manager (RMAN)&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;Configure full, incremental, cumulative, and archive log backups.&lt;/li&gt;&lt;li&gt;Perform database recovery scenarios including point-in-time recovery (PITR) and block-level recovery.&lt;/li&gt;&lt;li&gt;Conduct regular restore validation and disaster recovery drills.&lt;/li&gt;&lt;li&gt;Manage backup retention policies and optimize storage usage.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;3. High Availability &amp;amp; Disaster Recovery&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Configure and manage &lt;strong&gt;Oracle Data Guard&lt;/strong&gt; (Physical &amp;amp; Logical Standby).&lt;/li&gt;&lt;li&gt;Monitor redo transport and apply lag; perform switchover and failover activities.&lt;/li&gt;&lt;li&gt;Support &lt;strong&gt;Oracle Real Application Clusters (RAC)&lt;/strong&gt; environments for high availability.&lt;/li&gt;&lt;li&gt;Implement ASM (Automatic Storage Management) disk groups and manage storage allocation.&lt;/li&gt;&lt;li&gt;Coordinate DR testing and document recovery procedures.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;4. 
Performance Monitoring &amp;amp; Tuning&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Monitor database health using AWR, ADDM, ASH reports.&lt;/li&gt;&lt;li&gt;Identify and resolve performance bottlenecks related to CPU, memory, I/O, and SQL queries.&lt;/li&gt;&lt;li&gt;Tune poorly performing SQL queries using execution plans and indexing strategies.&lt;/li&gt;&lt;li&gt;Optimize memory parameters (SGA/PGA) and initialization parameters.&lt;/li&gt;&lt;li&gt;Implement partitioning and indexing strategies to improve performance.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;5. Security &amp;amp; Compliance&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Create and manage database users, roles, and privileges.&lt;/li&gt;&lt;li&gt;Implement password policies and auditing policies.&lt;/li&gt;&lt;li&gt;Configure Transparent Data Encryption (TDE).&lt;/li&gt;&lt;li&gt;Perform database vulnerability assessments and apply security patches.&lt;/li&gt;&lt;li&gt;Ensure compliance with enterprise security standards.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;6. Database Maintenance &amp;amp; Monitoring&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Perform space management activities including tablespace resizing and datafile management.&lt;/li&gt;&lt;li&gt;Monitor archive log generation and FRA usage.&lt;/li&gt;&lt;li&gt;Schedule and manage jobs using DBMS_SCHEDULER.&lt;/li&gt;&lt;li&gt;Automate routine DBA tasks using shell scripting and PL/SQL.&lt;/li&gt;&lt;li&gt;Proactively monitor alert logs and resolve database issues.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;7. 
Cloud &amp;amp; Migration Exposure&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Support database deployments in cloud platforms (OCI/AWS/Azure).&lt;/li&gt;&lt;li&gt;Manage databases hosted on &lt;strong&gt;Oracle Cloud Infrastructure (OCI)&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;Perform on-prem to cloud database migrations using Data Pump and RMAN duplication.&lt;/li&gt;&lt;li&gt;Work with hybrid cloud architectures and storage integrations.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;8. Production Support&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Provide 24/7 on-call support for critical production databases.&lt;/li&gt;&lt;li&gt;Perform root cause analysis (RCA) for incidents and implement preventive measures.&lt;/li&gt;&lt;li&gt;Coordinate with application teams for release deployments.&lt;/li&gt;&lt;li&gt;Maintain SLA adherence and system uptime above 99.9%.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;9. Documentation &amp;amp; Collaboration&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Maintain detailed documentation for database architecture and procedures.&lt;/li&gt;&lt;li&gt;Work closely with developers for schema design and query optimization.&lt;/li&gt;&lt;li&gt;Participate in capacity planning and hardware sizing discussions.&lt;/li&gt;&lt;li&gt;Support audit and compliance documentation requirements.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;DBA / Data warehousing - Other&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Quinnox&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/818304/oracle-dba-at-quinnox/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818304/oracle-dba-at-quinnox/</link>
  <title>[Full Time] oracle DBA at Quinnox</title>
  <dc:date>2026-03-27T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819206/erwin-data-modeler-architect-vois-vodafone-hiring-at-vois/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;raja.arun@vodafone.com &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;+91 9900123088 / 6383074760 &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt; https://jobs.vodafone.com/careers/job/563018695907408&lt;/p&gt;&lt;br&gt;&lt;p&gt;https://jobs.vodafone.com/careers/job/563018694663560&lt;/p&gt;&lt;br&gt;&lt;p&gt;Core competencies, knowledge and experience [max 5]: &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Essential: &lt;/strong&gt; &lt;/p&gt;&lt;ol type=&quot;1&quot;&gt;&lt;li&gt;&lt;strong&gt;Experience on&lt;/strong&gt; &lt;strong&gt;ETL&lt;/strong&gt; &lt;strong&gt;solutions&lt;/strong&gt; &lt;strong&gt;using&lt;/strong&gt; &lt;strong&gt;Ab-Initio&lt;/strong&gt; &lt;strong&gt;&amp;amp; Teradata&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Exposure on Data Modelling tools&lt;/strong&gt; &lt;strong&gt;eg.&lt;/strong&gt; &lt;strong&gt;Power Designer, Erwin etc.&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Knowledge on&lt;/strong&gt; &lt;strong&gt;Data Warehousing, Data Modelling, Data Profiling etc.&lt;/strong&gt; &lt;/li&gt;&lt;/ol&gt;&lt;p&gt;&lt;strong&gt;Experience: &lt;/strong&gt; &lt;/p&gt;&lt;ol type=&quot;1&quot;&gt;&lt;li&gt;&lt;strong&gt;Overall&lt;/strong&gt; &lt;strong&gt;experience of&lt;/strong&gt; &lt;strong&gt;9-12&lt;/strong&gt; &lt;strong&gt;years&lt;/strong&gt; &lt;/li&gt;&lt;li&gt;&lt;strong&gt;Relevant experience of 4-5 years&lt;/strong&gt; &lt;/li&gt;&lt;/ol&gt;&lt;p&gt;&lt;strong&gt;Must have technical / professional qualifications:&lt;/strong&gt; &lt;strong&gt;B.E. 
/ B.&lt;/strong&gt; &lt;strong&gt;Tech ,&lt;/strong&gt; &lt;strong&gt;BCA /&lt;/strong&gt; &lt;strong&gt;MCA ,&lt;/strong&gt; &lt;strong&gt;BSc / MSc (Comp Science)&lt;/strong&gt; &lt;/p&gt;&lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;VOIS&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819206/erwin-data-modeler-architect-vois-vodafone-hiring-at-vois/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819206/erwin-data-modeler-architect-vois-vodafone-hiring-at-vois/</link>
  <title>[Full Time] Erwin Data Modeler Architect - Vois Vodafone Hiring at VOIS</title>
  <dc:date>2026-03-27T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817522/software-developer-sql-power-bi-at-siemens/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;We are looking for a Software Engineer SQL, Power BI &amp;amp; Automation&lt;/span&gt;&lt;/strong&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;We are looking for a Software Engineer skilled in SQL, Power BI, and the Microsoft ecosystem to build scalable data solutions, optimize databases, and deliver highquality analytics. The role includes developing dashboards, automating data workflows, implementing data consistency checks, and integrating data across SQL, SharePoint, M365, Snowflake and other sources. You will work in a DevOps model across development, testing, and UAT, using GitLab for CI/CD. Responsibilities also include handling Azure AD security and governance while supporting AIdriven initiatives using Generative AI and LLM concepts. 
The ideal candidate is meticulous, collaborative, and comfortable working in dynamic Agile environments.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Youll make a difference by:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ol&gt;&lt;li&gt;Development:&lt;/li&gt;&lt;/ol&gt;&lt;ul&gt;&lt;li&gt;Design, optimize, and maintain complex SQL databases, including schemas, stored procedures, and highperformance queries.&lt;/li&gt;&lt;li&gt;Implement database solutions to improve scalability, reliability, and data integrity.&lt;/li&gt;&lt;li&gt;Work in a DevOps model, handling endtoend activities across development, testing, and UAT environments.&lt;/li&gt;&lt;li&gt;Implement automated data consistency checks to ensure accuracy, reliability, and integrity across systems.&lt;/li&gt;&lt;li&gt;Ensure smooth deployment cycles by collaborating closely across Dev, Test, and UAT to maintain quality and continuity.&lt;/li&gt;&lt;li&gt;Monitor and tune performance (query tuning, indexing, execution plans) across environments.&lt;/li&gt;&lt;li&gt;Develop interactive, enterprisegrade dashboards and reports using Power BI Desktop and Power BI Service.&lt;/li&gt;&lt;li&gt;Build robust data models (star/snowflake), apply DAX, and shape data with Power Query.&lt;/li&gt;&lt;li&gt;Partner with stakeholders to define KPIs and deliver actionable insights.&lt;/li&gt;&lt;li&gt;Integrate data from SQL Servers, Streaming Data sources, SharePoint Online, Excel, Dataverse, and REST APIs.&lt;/li&gt;&lt;li&gt;Build end-to-end dataflows and lightweight automations using Power Automate.&lt;/li&gt;&lt;li&gt;Ensure database best practices, coding standards, and documentation are consistently applied.&lt;/li&gt;&lt;li&gt;Build and optimize SQL queries for analytics, reporting, and application integration.&lt;/li&gt;&lt;li&gt;Work with Linux-based environments for deployment and operations.&lt;/li&gt;&lt;li&gt;Apply understanding of Generative AI concepts, LLMs, and prompt engineering to 
data/analytics use cases.&lt;/li&gt;&lt;li&gt;Support data preprocessing and feature engineering workflows for model readiness.&lt;/li&gt;&lt;li&gt;Contribute to AI-powered feature implementation and model deployment/optimization in production settings.&lt;/li&gt;&lt;/ul&gt;&lt;ol&gt;&lt;li&gt;Collaboration:&lt;/li&gt;&lt;/ol&gt;&lt;ul&gt;&lt;li&gt;Work closely with other developers, product managers, and designers in an Agile environment.&lt;/li&gt;&lt;li&gt;Proactively participate in code reviews.&lt;/li&gt;&lt;/ul&gt;&lt;ol&gt;&lt;li&gt;Continuous Integration/Continuous Deployment (CICD):&lt;/li&gt;&lt;/ol&gt;&lt;ul&gt;&lt;li&gt;Utilize GitLab for effective version control, issue tracking, and CICD pipelines.&lt;/li&gt;&lt;li&gt;Build and maintain CICD pipelines to automate testing, deployment, and monitoring processes.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Youll win us over by:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;u&gt;&lt;span&gt;Technical Expertise:&lt;/span&gt;&lt;/u&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelors degree in computer science or a related technical field, or equivalent experience.&lt;/li&gt;&lt;li&gt;3-8 Years of professional experience working with SQL, Power BI.&lt;/li&gt;&lt;li&gt;Strong expertise in SQL and database development, including sophisticated query optimization, stored procedures, and database design.&lt;/li&gt;&lt;li&gt;Sophisticated knowledge of Oracle DB or other relational databases, including performance tuning and administration.&lt;/li&gt;&lt;li&gt;Experience implementing AI-powered features in production applications&lt;/li&gt;&lt;li&gt;Knowledge of AI/ML model deployment and optimization&lt;/li&gt;&lt;li&gt;Excellent analytical skills and an understanding of data structures and algorithms.&lt;/li&gt;&lt;li&gt;Proven understanding of CICD practices and experience with GitLab.&lt;/li&gt;&lt;li&gt;Ability to work independently or with a team in a dynamic 
environment.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;u&gt;&lt;span&gt;General Qualities:&lt;/span&gt;&lt;/u&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Excellent problem-solving and analytical skills.&lt;/li&gt;&lt;li&gt;Solid attention to detail.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Siemens&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817522/software-developer-sql-power-bi-at-siemens/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817522/software-developer-sql-power-bi-at-siemens/</link>
  <title>[Full Time] Software Developer (SQL, Power BI ) at Siemens</title>
  <dc:date>2026-03-26T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817710/aws-data-architect-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Role &amp;amp; responsibilities&lt;/strong&gt; &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Relational SQL/ Caching expertise Deep knowledge of Amazon Aurora PostgreSQL, ElastiCache etc..&lt;/li&gt;&lt;li&gt;Data modeling Experience in OLTP and OLAP schemas, normalization, denormalization, indexing, and partitioning.&lt;/li&gt;&lt;li&gt;Schema design &amp;amp; migration  Defining best practices for schema evolution when migrating from SQL Server to PostgreSQL.&lt;/li&gt;&lt;li&gt;Data governance  Designing data lifecycle policies, archival strategies, and regulatory compliance frameworks.&lt;/li&gt;&lt;li&gt;AWS Glue &amp;amp; AWS DMS  Leading data migration strategies to Aurora PostgreSQL.&lt;/li&gt;&lt;li&gt;ETL &amp;amp; Data Pipelines  Expertise in Extract, Transform, Load (ETL) workflows . Glue jobs features and event-driven architectures.&lt;/li&gt;&lt;li&gt;Data transformation &amp;amp; mapping  PostgreSQL PL/pgSQL migration / transformation expertise while ensuring data integrity.&lt;/li&gt;&lt;li&gt;Cross-platform data integration  Connecting cloud and on-premises / other cloud data sources.&lt;/li&gt;&lt;li&gt;AWS Data Services  Strong experience in S3, Glue, Lambda, Redshift, Athena, and Kinesis.&lt;/li&gt;&lt;li&gt;Infrastructure as Code (IaC)  Using Terraform, CloudFormation, or AWS CDK for database provisioning.&lt;/li&gt;&lt;li&gt;Security &amp;amp; Compliance  Implementing IAM, encryption (AWS KMS), access control policies, and compliance frameworks (eg. 
GDPR ,PII).&lt;/li&gt;&lt;li&gt;Query tuning &amp;amp; indexing strategies  Optimizing queries for high performance.&lt;/li&gt;&lt;li&gt;Capacity planning &amp;amp; scaling  Ensuring high availability, failover mechanisms, and auto-scaling strategies.&lt;/li&gt;&lt;li&gt;Data partitioning &amp;amp; storage optimization  Designing cost-efficient hot/cold data storage policies.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Should have experience with setting up the AWS architecture as per the project requirements&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Preferred candidate profile&lt;/strong&gt; &lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Data Warehousing  Expertise in Amazon Redshift, Snowflake, or BigQuery.&lt;/li&gt;&lt;li&gt;Big Data Processing  Familiarity with Apache Spark, EMR, Hadoop, or Kinesis.&lt;/li&gt;&lt;li&gt;Data Lakes &amp;amp; Analytics  Experience in AWS Lake Formation, Glue Catalog, and Athena.&lt;/li&gt;&lt;li&gt;Machine Learning Pipelines  Understanding of SageMaker, BedRock etc. 
for AI-driven analytics.&lt;/li&gt;&lt;li&gt;CI/CD for Data Pipelines  Knowledge of AWS CodePipeline, Jenkins, or GitHub Actions.&lt;/li&gt;&lt;li&gt;Serverless Data Architectures  Experience with event-driven systems (SNS, SQS, Step Functions).&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817710/aws-data-architect-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817710/aws-data-architect-at-tata-consultancy/</link>
  <title>[Full Time] AWS Data Architect at Tata Consultancy</title>
  <dc:date>2026-03-26T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817739/data-architect-aws-at-leading-client/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;We are looking for a skilled Data Architect with expertise in AWS services to design and implement scalable data solutions. The ideal candidate will have 12+ years of experience in the field.&lt;/p&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt;Roles and Responsibility&lt;/b&gt;&lt;span&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Design, architect, and implement scalable data solutions using key AWS services.&lt;/li&gt;&lt;li&gt;Build secure, robust, and cost-optimized data pipelines and storage layers aligned with AWS best practices.&lt;/li&gt;&lt;li&gt;Manage and optimize data ingestion, transformation, and orchestration workflows on AWS.&lt;/li&gt;&lt;li&gt;Develop and maintain conceptual, logical, and physical data models for enterprise data platforms.&lt;/li&gt;&lt;li&gt;Implement data models across star schema, snowflake schema, and NoSQL structures aligned to business needs.&lt;/li&gt;&lt;li&gt;Ensure data consistency, integrity, and scalability across data environments.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;Job Requirements&lt;/b&gt;&lt;span&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Strong expertise in AWS services including Glue, Data Catalog, Lambda, S3, SQS, and SNS.&lt;/li&gt;&lt;li&gt;Hands-on experience with data modeling (star schema, snowflake, NoSQL structures).&lt;/li&gt;&lt;li&gt;Proficiency in Python, PySpark, and big data architecture to build ETL pipelines.&lt;/li&gt;&lt;li&gt;Experience in SQL development and performance optimization.&lt;/li&gt;&lt;li&gt;Ability to lead technical design discussions, code reviews, and architecture decision-making sessions.&lt;/li&gt;&lt;li&gt;Mandatory AWS professional certificate.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; 
QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Leading Client&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817739/data-architect-aws-at-leading-client/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817739/data-architect-aws-at-leading-client/</link>
  <title>[Full Time] Data Architect AWS at Leading Client</title>
  <dc:date>2026-03-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819500/lead-software-engineer-at-leading-client/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; 8-12years of good hands on exposure with Big Data technologies Pyspark (Data frame and SparkSQL), Hadoop, and Hive. &lt;/li&gt; &lt;li&gt; Good hands on experience of python/Pyspark. &lt;/li&gt; &lt;li&gt; Good understanding of SQL and data warehouse concepts. &lt;/li&gt; &lt;li&gt; Strong analytical, problem-solving, data analysis and research skills &lt;/li&gt; &lt;li&gt; Demonstrable ability to think outside of the box and not be dependent on readily available tools &lt;/li&gt; &lt;li&gt; Excellent communication, presentation and interpersonal skills are a must &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;strong&gt; Good to have: &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Hands-on experience with using Cloud Platform provided Big Data technologies- AWS . &lt;/li&gt; &lt;li&gt; Any job scheduler experience &lt;/li&gt; &lt;li&gt; Experience in migrating workload from on-premise to cloud and cloud to cloud migrations. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;strong&gt; Roles &amp;amp; Responsibilities &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Develop efficient ETL pipelines as per business requirements, following the development standards and best practices. &lt;/li&gt; &lt;li&gt; Perform integration testing of different created pipeline in Cloud env. &lt;/li&gt; &lt;li&gt; Provide estimates for development, testing &amp;amp; deployments on different env. &lt;/li&gt; &lt;li&gt; Participate in code peer reviews to ensure our applications comply with best practices. &lt;/li&gt; &lt;li&gt; Create cost effective Cloud pipeline with required Cloud services i.e AWS. 
&lt;/li&gt; &lt;li&gt; &lt;strong&gt; Location &lt;/strong&gt; : - New Delhi,Indore, MP, Haryana, Madhya Pradesh , Uttar Pradesh, &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Leading Client&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819500/lead-software-engineer-at-leading-client/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819500/lead-software-engineer-at-leading-client/</link>
  <title>[Full Time] Lead software Engineer at Leading Client</title>
  <dc:date>2026-03-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817712/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt; &lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Google Cloud Platform Architecture&lt;br&gt; &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt; &lt;br&gt;Educational &lt;b&gt;Qualification&lt;/b&gt; :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt; Project Role Description: Define the data requirements and structure for the applications. Model and design the application data structure, storage and integration. &lt;br&gt;Must have skills :AWS &amp;amp; Google Architecture Good to have skills :NA &lt;br&gt;15 year(s) of expected experience &lt;b&gt;Summary&lt;/b&gt;: &lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly across the organization, contributing to the overall efficiency and effectiveness of data management practices. &lt;b&gt;&lt;br&gt;Roles &amp;amp; Responsibilities&lt;br&gt;:&lt;/b&gt; Expected to be a Subject Matter Expert with deep knowledge and experience in data architecture and solutions blueprinting Should have influencing and advisory skills. Expected to provide solutions to problems that apply across multiple teams. Facilitate workshops and discussions to gather requirements and feedback from stakeholders. 
Continuously evaluate and improve data architecture practices to enhance performance and scalability. &lt;b&gt;&lt;br&gt;Professional &amp;amp; Technical Skills:&lt;br&gt;&lt;/b&gt; Must Have Skills:Proficiency in AWS &amp;amp; Google platform Architecture. Python and familiarity with DevOps and Infrastructure as code Strong understanding of data modeling techniques and best practices. Experience with cloud-based data storage solutions and integration strategies. Familiarity with data governance frameworks and compliance requirements. Ability to design and implement data pipelines for efficient data processing. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;br&gt;- The candidate should have minimum 15 years of experience in Google Cloud Platform Architecture.&lt;br&gt;- This position is based at our Pune office.&lt;br&gt;- A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817712/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817712/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817740/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt; &lt;br&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Data Architecture Principles&lt;br&gt; &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;18&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt; &lt;br&gt;Educational &lt;b&gt;Qualification&lt;/b&gt; :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with business objectives and supports efficient data management practices. You will collaborate with various teams to understand their data needs and provide innovative solutions to enhance data accessibility and usability.&lt;b&gt;&lt;br&gt;Roles &amp;amp; Responsibilities:&lt;br&gt;&lt;/b&gt;&lt;br&gt;- Understanding the data model of AS IS Architecture and TO BE Architecture&lt;br&gt;- Collaborative work with external teams and internal teams to come with common data model and design the application data structure, storage, and integration components.&lt;br&gt;- Lead the analysis, design, and build effort for all data storage and processing components being custom built or reused.&lt;br&gt;- Work with the integration architects and specialists to design the integration solution.&lt;br&gt;- Ensure that the data storage and processing designs fulfill the requirements, including data volume, frequency needs, and long-term data growth.&lt;br&gt;- Assist in determining the cross-application and cross-component data standards, data distribution standards, and tuning 
strategies.&lt;br&gt;- Work with the technology architects and application designers to understand the data conversion requirements fully and to design the conversion procedures and applications.&lt;br&gt;- Review the database deliverables throughout development to ensure quality and traceability to requirements and adherence to all quality management plans and standards.&lt;br&gt;- Ensure that the developers and data architecture specialist responsible for developing the database thoroughly understand the requirements and designs.&lt;br&gt;- Work with other architects to ensure that all components work together to meet objectives and performance goals as defined in the requirements. Identify and communicate any cross-area or cross-release issues that may affect other areas of the project.&lt;br&gt;- Support the development and test teams with the creation of test data. Ensure test data conforms to data security requirements.&lt;br&gt;- Participate in quality management reviews.&lt;br&gt;- Ensure client data protection guidelines are followed for sensitive personal data in accordance to Accenture s Client Data Protection program.&lt;br&gt;- Work with customers and security architect to identify the access control and identity requirements for the application.&lt;b&gt;&lt;br&gt;Professional &amp;amp; Technical Skills:&lt;br&gt;&lt;/b&gt;&lt;br&gt; Hands-on relational, dimensional, and/or analytic experience (using RDBMS, dimensional, NoSQL data platform technologies and data ingestion protocols).&lt;br&gt; - Experience with data warehouse, data lake, and enterprise big data platforms in multi-data-center contexts required.&lt;br&gt;- Good knowledge of metadata management, data modeling, and related tools (Erwin or ER Studio or others) required.&lt;br&gt;- Ability to develop or revise the data architecture as new data requirements arise&lt;br&gt;- Ability to develop and manage all aspects of the data effort, including the plans, interdependencies, schedule, budget, 
tools, and required personnel for developing the database&lt;br&gt;- Data Governance&lt;br&gt;- Azure Data Fabric&lt;br&gt;- Azure Data Factory &lt;b&gt;Additional Information:&lt;/b&gt;&lt;br&gt;- Good communication and Interpersonal skills&lt;br&gt;- Able to work independently by collaborating with external teams&lt;br&gt; &lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817740/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817740/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819089/power-bi-developer-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Roles and Responsibilities&lt;/strong&gt;&lt;br&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Design, develop, and deploy data visualizations using Microsoft Power BI to meet business requirements.&lt;br&gt;&lt;/li&gt;&lt;li&gt;Collaborate with stakeholders to gather requirements and understand their needs for reporting and analytics solutions.&lt;br&gt;&lt;/li&gt;&lt;li&gt;Develop complex reports, dashboards, and datasets using Power BI&apos;s modeling capabilities.&lt;br&gt;&lt;/li&gt;&lt;li&gt;Optimize report performance by tuning queries, improving data sources, and reducing load times.&lt;br&gt;&lt;/li&gt;&lt;li&gt;Troubleshoot issues related to report development, deployment, and consumption.&lt;br&gt;&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819089/power-bi-developer-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819089/power-bi-developer-at-tata-consultancy/</link>
  <title>[Full Time] Power BI Developer at Tata Consultancy</title>
  <dc:date>2026-03-23T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819168/data-architect-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;.Role &amp;amp; responsibilities&lt;/strong&gt; &lt;/p&gt;&lt;p&gt; TM forum data models, schemes, and databases to support efficient data storage, Database expertise, Cloud computing, Programming language SQL, Python. &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Designing and implementing TM forum data models, schemes, and databases to support efficient data storage, retrieval, and analysis &lt;/li&gt;&lt;li&gt;Collaborating with stakeholders to understand business requirements and translating these requirements into technical specifications and data architecture designs &lt;/li&gt;&lt;li&gt;Define overall data architecture, data ingestion framework best practices etc. &lt;/li&gt;&lt;li&gt;Selecting and managing the best cloud services to support end-to-end Data platform implementation for the organizations specific needs &lt;/li&gt;&lt;li&gt;Develop and maintain conceptual, logical, and physical data models leveraging GCP services such as BigQuery, Cloud SQL etc. &lt;/li&gt;&lt;li&gt;Work on end-to-end data initiatives, from gathering requirements to cutover. &lt;/li&gt;&lt;li&gt;Translate business requirements from stakeholders into effective data model designs. &lt;/li&gt;&lt;li&gt;Play a key role in migrating data from legacy on-premises systems to cloud-native GCP solutions. &lt;/li&gt;&lt;li&gt; Data Management Knowledge, MySQL, Oracle, Data model tool (Erwin), Big Data Technologies, ETL knowledge. 
&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819168/data-architect-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819168/data-architect-at-tata-consultancy/</link>
  <title>[Full Time] Data Architect at Tata Consultancy</title>
  <dc:date>2026-03-23T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817563/power-bi-developer-at-leading-client/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;We are looking for a skilled Power BI Developer with 5 to 10 years of experience. The ideal candidate should have excellent expertise in DAX queries and functions in Power BI, as well as experience working with data from cloud data sources.&lt;/p&gt;&lt;div&gt;&lt;br&gt; &lt;b&gt;Roles and Responsibility&lt;/b&gt;&lt;br&gt;&lt;span&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Design and develop interactive dashboards using Power BI.&lt;/li&gt;&lt;li&gt;Create and manage complex data models using DAX.&lt;/li&gt;&lt;li&gt;Develop and maintain databases on MS SQL Server BI Stack, including SSRS and Power Query.&lt;/li&gt;&lt;li&gt;Collaborate with stakeholders to understand business requirements and provide solutions.&lt;/li&gt;&lt;li&gt;Troubleshoot and resolve technical issues related to Power BI.&lt;/li&gt;&lt;li&gt;Optimize performance and ensure data quality in Power BI reports.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;Job Requirements&lt;/b&gt;&lt;span&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Excellent expertise in DAX queries and functions in Power BI.&lt;/li&gt;&lt;li&gt;Experience with tools and systems on MS SQL Server BI Stack, including SSRS and Power Query.&lt;/li&gt;&lt;li&gt;Experience working with data from cloud data sources.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling and database design principles.&lt;/li&gt;&lt;li&gt;Ability to work independently and collaboratively as part of a team.&lt;/li&gt;&lt;li&gt;Excellent problem-solving skills and attention to detail.&lt;/li&gt;&lt;li&gt;Notice period: Immediate.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database 
Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Leading Client&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817563/power-bi-developer-at-leading-client/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817563/power-bi-developer-at-leading-client/</link>
  <title>[Full Time] Power BI Developer at Leading Client</title>
  <dc:date>2026-03-18T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817717/data-architect-aws-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt; Experience At least 10+ years of experience in AWS based projects&lt;/li&gt;&lt;li&gt;Proficiency in Python and PySpark for data engineering tasks. Big Data Strong knowledge of Big Data technologies and data warehousing concepts&lt;/li&gt;&lt;li&gt;AWS services Experience with AWS Data Engineering stack, including S3, RDS, Athena, Glue, Lambda, and Step Functions. SQL Strong SQL skills for data manipulation and querying. &lt;/li&gt;&lt;li&gt;Experience with CI CD tools like Terraform and Git Actions. Soft skills Good communication skills and ability to work in a multicultural team&lt;/li&gt;&lt;li&gt;Design and implement data pipelines Develop ETL jobs to ingest and move data within the AWS environment using tools like AWS Glue&lt;/li&gt;&lt;li&gt;Data storage and processing&lt;/li&gt;&lt;li&gt;Build and maintain systems for data collection storage processing and analysis using AWS services such as S3 RDS Athena and Redshift &lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817717/data-architect-aws-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817717/data-architect-aws-at-virtusa/</link>
  <title>[Full Time] Data Architect ( AWS ) at Virtusa</title>
  <dc:date>2026-03-18T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817604/database-developer-at-idexcel/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;b&gt;Roles and Responsibilities :&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Design, develop, test, and deploy scalable databases on AWS using PostgreSQL.&lt;/li&gt;&lt;li&gt;Migrate data from legacy systems to new databases with high accuracy and efficiency.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to identify database requirements and implement solutions that meet business needs.&lt;/li&gt;&lt;li&gt;Troubleshoot issues related to database performance, security, and compliance.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Job Requirements :&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;4-8 years of experience in database development using PostgreSQL or similar technologies (e.g., MySQL).&lt;/li&gt;&lt;li&gt;Strong understanding of AWS services such as RDS, S3, Lambda functions etc. .&lt;/li&gt;&lt;li&gt;Experience with database migration tools and techniques for large-scale migrations.&lt;/li&gt;&lt;li&gt;Proficiency in writing efficient SQL queries for querying large datasets.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Idexcel&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817604/database-developer-at-idexcel/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817604/database-developer-at-idexcel/</link>
  <title>[Full Time] Database Developer at Idexcel</title>
  <dc:date>2026-03-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816896/data-architect-finance-at-kone/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;We are looking for a&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Data Architect, Finance Data Products&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;to join KONE IT Enterprise Data &amp;amp; Analytics team in Pune, India. The team is driving forward Data Foundation, a new way of working with data using the latest cloud technology Databricks on AWS leveraging modern multi-hop medallion data architecture. Data Foundation is a key enabler in KONE digital transformation creating ability to develop new scalable AI/ML and digital use cases by leveraging data across the whole organization. In our approach, data products play a vital role in business value generation. Driving optimized data architecture practices is crucial to ensure reusability and maintainability of data products.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Finance Data Architect is responsible for ensuring an effective data supply chain within Finance Data Domain on our Databricks platform. This includes designing and optimizing the data architecture from the data ingestion layer (bronze) to the business-optimized data consumption layers (gold), enhancing the value of available data products, and documenting the physical data architecture of the data products. Finance data domain encompasses data integrations from various application systems, and the finance data products are utilized by multiple consumers across the company for different use cases, functions, and teams.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;The responsibilities of a Data Architect on Databricks platform include designing, developing, and creating data products to meet business data needs. This role also involves providing data architecture support to all stakeholders and offering guidance on technical issues arising from data-related problems and incidents, as well as recommending actions to mitigate these issues. 
Data Architect works closely with Product Owners, Data Engineers, Global Data Owners, Enterprise Architects, Data Scientists, and development teams across the organization.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;As Finance Data Architect your main responsibilities will be:&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Analyzing data requirements and forming information requirements specifications for data products. Translating business requirements into clear data requirements for end-to-end data pipeline creation and business optimized data consumption.&lt;/li&gt; &lt;li&gt;Designing data architecture and data products: Data architecture design, data modelling and data quality assurance activities.&lt;/li&gt; &lt;li&gt;Driving data related innovation creation, such as developing new data products, and providing guidance to data consumers about existing data products and data consumption capabilities.&lt;/li&gt; &lt;li&gt;Translating KONE&apos;s strategic objectives into data requirements on the Databricks platform, while incorporating a data architecture perspective into decision-making and planning activities to effectively manage complex implementations.&lt;/li&gt; &lt;li&gt;Identifying data architecture improvement needs and opportunities and ensure that the improvements are incorporated into roadmaps, and to source systems or data consuming systems roadmaps, when required.&lt;/li&gt; &lt;li&gt;Supporting in incident handling process when the incident is related to data. 
Helping in identifying data flow and data quality flaws causing these incidents and preparing change proposals to permanently fix them.&lt;/li&gt; &lt;li&gt;Data product cataloguing to data catalog to improve availability of data documentation, and usability of data by the data consumers.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;To succeed in Data Architect role, following professional experience will play a key role:&lt;/b&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Masters degree in engineering, computer science, management information systems, or a related field&lt;/li&gt; &lt;li&gt;12-15+ years of professional hands-on experience in data architecture, data modeling, data engineering, analytical data processing, software engineering in enterprise working environment.&lt;/li&gt; &lt;li&gt;Subject matter experience in finance processes and genuine passion understand the data produced in the finance business processes. Previous working experience in enterprise systems, such as SAP ERP FI/CO, FSCM.&lt;/li&gt; &lt;li&gt;Experience in working with complex data enterprise landscapes (raw, semi structured, and unstructured data with different patterns batch, real-time, streaming) from multiple business processes and applications.&lt;/li&gt; &lt;li&gt;Ability to write clear documentation and visualize data architecture design as data pipelines and data models (3NF, dimensional modeling) with clear information requirements. 
Ability to provide data architecture support on hands-on level to data engineering teams.&lt;/li&gt; &lt;li&gt;Proven coding skills with multiple languages (SQL, Python) with hands-on experience in industry standard software development life cycle methods, DevSecOps/DataOps practices spanning the full data product/data pipeline lifecycle.&lt;/li&gt; &lt;li&gt;Technical expertise in following tech stack: Databricks data engineering, multi-hop medallion architecture, Unity Catalog, Airflow, DBT, AWS Glue, AWS EMR, AWS Athena, Spark, Apache Flink, Apache Kafka, Terraform, Gitlab/Github. (Databricks and AWS certifications is an advantage).&lt;/li&gt; &lt;li&gt;Expertise in cybersecurity guidelines, data privacy, compliancy regulations and quality assurance practices, and cloud FinOps.&lt;/li&gt; &lt;li&gt;Experience in coaching and/or mentoring, proactive knowledge sharing and maturing DataOps practices.&lt;/li&gt; &lt;li&gt;Passion to utilize agile development methodologies and tools (Jira, Confluence, draw.io).&lt;/li&gt; &lt;li&gt;Ability to work collaborate and deliver in agile sprints. Ability to work in a global multi-cultural team across different countries and effectively collaborate within the teams. Ability to self-organize and be proactive, seek feedback, be courageous and resilient, and have excellent problem-solving skills.&lt;/li&gt; &lt;li&gt;Proficiency in spoken and written English language, and strong facilitation and communication skills.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;i&gt;Founded in 1910, KONE&lt;/i&gt; &lt;i&gt;is a global leader that provides elevators, escalators and automatic building doors, as well as solutions for maintenance and modernization that add value to buildings throughout their life cycle. Our mission is to improve the flow of urban life and make our worlds cities better places to live by providing innovative solutions that help make peoples journeys safe, convenient and reliable. 
Our operations in over 60 countries around the world has helped us achieve our position as an&lt;/i&gt; &lt;i&gt;innovation and sustainability&lt;/i&gt; &lt;i&gt;leader with repeated&lt;/i&gt; &lt;i&gt;recognitions by&lt;/i&gt; &lt;i&gt;Forbes, Corporate Knights for clean capitalism and others.&amp;nbsp;&lt;/i&gt;&lt;/p&gt;&lt;p&gt;&lt;i&gt;KONEs vision is to create the Best People Flow experience by providing ease, effectiveness and experience to our customers and users. In line with our strategy, Sustainable Success with Customers, we will focus on increasing the value we create for customers with new intelligent solutions and embed sustainability even deeper across all of our operations. By closer collaboration with customers and partners, KONE will increase the speed of bringing new services and solutions to the market.&lt;/i&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;KONE Technology and Innovation Unit (KTI) &lt;/b&gt;is where we combine the physical world  escalators and elevators  with smart and connected digital systems. We are changing and improving the way billions of people move within buildings every day. We are on a mission to expand and develop new digital solutions that are based on emerging technologies.&lt;/p&gt;&lt;p&gt;&lt;b&gt;KONE IT &lt;/b&gt;is a global team of expert professionals working along with business functions and area teams to develop new capabilities and enable new business opportunities. We are trusted partners of KONE business lines and functions to develop, transform, manage and run their information technology solutions. We support KONE in its digital transformation journey by introducing e.g. 
digital cloud-based IT services, artificial intelligence (AI) and automation to support productivity, business growth and technological disruption.Sustainability, curious mindset and innovation areat the core of everything we do, and this makes us an integral part ofKONEs success.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;IT professionals - KONE Corporation&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;i&gt;At KONE, we are focused on creating an innovative and collaborative working culture where we value the contribution of each individual. Employee engagement is a key focus area for us and we encourage participation and the sharing of information and ideas. Sustainability is an integral part of our culture and the daily practice. We follow ethical business practices and we seek to develop a culture of working together where co-workers trust and respect each other and good performance is recognized. In being a great place to work, we are proud to offer a range of experiences and opportunities that will help you to achieve your career and personal goals and enable you to live a healthy and balanced life.&lt;/i&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Read more on &lt;span&gt;&lt;i&gt;&lt;u&gt;https://careers.kone.com/en/&lt;/u&gt;&lt;/i&gt;&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Industrial Equipment / Machinery&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Kone&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816896/data-architect-finance-at-kone/&quot;&gt;Apply&lt;/a&gt;&lt;br 
/&gt;</description>
  <link>https://ineojobs.com/job/816896/data-architect-finance-at-kone/</link>
  <title>[Full Time] Data Architect, Finance at Kone</title>
  <dc:date>2026-03-13T09:25:47+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816880/assoc-architect-it-data-architecture-at-baxter/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; We are looking for a highly skilled and experienced Assoc Architect, IT-Data Architecture to join our team in Bengaluru. The ideal candidate will have 10 to 15 years of experience in data architecture and analytics. &lt;/div&gt; &lt;div&gt; Roles and Responsibility &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Design and develop complex applications for extracting, transforming, and loading data. &lt;/li&gt; &lt;li&gt; Ensure data quality and lead the design of logical data models and implement physical database structures. &lt;/li&gt; &lt;li&gt; Collaborate with other IT specialists to rapidly develop and deliver solutions that meet changing business needs. &lt;/li&gt; &lt;li&gt; Work with data owners to document data mappings and transformations to support effective downstream analytics and alerting. &lt;/li&gt; &lt;li&gt; Recommend and advise on data refresh, optimization of data, storage, and integration. &lt;/li&gt; &lt;li&gt; Attend various meetings as a Subject Matter Expert for ETL. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; Job Requirements &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong knowledge of Oracle, DataStage, Snowflake, AWS, and Python technologies. &lt;/li&gt; &lt;li&gt; Experience in designing and developing complex data applications. &lt;/li&gt; &lt;li&gt; Ability to collaborate with cross-functional teams to achieve business objectives. &lt;/li&gt; &lt;li&gt; Excellent communication and problem-solving skills. &lt;/li&gt; &lt;li&gt; Ability to work in a fast-paced environment and adapt to changing priorities. &lt;/li&gt; &lt;li&gt; Strong understanding of data architecture and analytics principles. 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Baxter&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hubli&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816880/assoc-architect-it-data-architecture-at-baxter/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816880/assoc-architect-it-data-architecture-at-baxter/</link>
  <title>[Full Time] Assoc Architect, IT-Data Architecture at Baxter</title>
  <dc:date>2026-03-13T05:50:52+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816895/manager-data-operations-engineering-at-pfizer/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;b&gt; ROLE SUMMARY&lt;/b&gt; &lt;/p&gt; &lt;p&gt; Pfizer s purpose is to deliver breakthroughs that change patients lives. Research and Development is at the heart of fulfilling Pfizer s purpose as we work to translate advanced science and technologies into the therapies and vaccines that matter most. Whether you are in the discovery sciences, ensuring drug safety and efficacy or supporting clinical trials, you will apply cutting edge design and process development capabilities to accelerate and bring the best in class medicines to patients around the world. &lt;/p&gt; &lt;p&gt; The &lt;b&gt; Data Operations Lead is a hands on data engineering leader &lt;/b&gt; responsible for operating, stabilizing, and continuously improving a large scale enterprise data platform that provides trusted data to more than 400 AI and analytical solutions across Pfizer Global Supply. &lt;/p&gt; &lt;p&gt; This role leads a technical data operations team while remaining deeply involved in complex investigations, code reviews, and engineering decisions. The primary objective is to ensure data reliability, responsiveness, and trust at enterprise scale, by applying strong data engineering practices, enforcing coding and operational standards, and delivering predictable service outcomes for business critical analytics and AI workloads&lt;b&gt; . &lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; ROLE RESPONSIBILITIES&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Data Engineering Operations Leadership&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Manage a hands on data engineering operations team responsible for supporting production data pipelines, databases, and AI data products. 
Ensure issues are investigated and resolved using strong engineering discipline, clear ownership, and consistent technical standards&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Hands On Data Engineering &amp;amp; Troubleshooting&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Remain actively hands on in complex investigations involving Python code, SQL logic, data pipelines, transformations, and database behavior. Review code, debug data issues, validate fixes, and guide engineers toward durable solutions. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Engineering Root Cause Analysis &amp;amp; Prevention&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Drive deep technical root cause analysis across ingestion, transformation, and consumption layers. Ensure recurring issues are addressed through code improvements, refactoring, better validations, or architectural fixes, rather than temporary workarounds. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Engineering Standards, Code Quality &amp;amp; Reviews&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Define, enforce, and evolve data engineering coding standards, including Python and SQL best practices, version control discipline, and code review expectations. Ensure all operational fixes meet quality, reliability, and maintainability standards even under production pressure. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; SLA Ownership Through Engineering Excellence&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Define, implement, and improve SLAs for data operations by reducing manual intervention, improving automation, and raising engineering quality. 
Track operational performance and continuously improve response and resolution outcomes through engineering improvements. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; AI Application Front Line Support&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Serve as the front line technical leader for AI and data driven applications, supporting model outputs, data pipelines feeding AI solutions, feature/embedding generation, and downstream data consumers. Diagnose data related AI issues and ensure fixes align with engineering best practices. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Database &amp;amp; Platform Reliability&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Own operational reliability across data platforms and databases, including schema management, query performance, access patterns, and data correctness. Ensure production data behavior is well understood, monitored, and documented. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Stakeholder Communication &amp;amp; Trust Restoration&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Provide clear, technically grounded communication to stakeholders regarding data issues, impacts, and remediation actions. Set realistic expectations and rebuild trust through predictable execution, transparency, and engineering credibility. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Professional Experience and Educational Requirement&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt; Education / Experience&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Bachelor s degree (Master s preferred) in Computer Science, Data Engineering, or a related technical field. 
&lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; 5 - 10 years of hands on Data Engineering experience&lt;/b&gt; , including operating and supporting production data systems. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Experience leading or acting as a Technical Lead for Data engineering or Data operations teams. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Technical (Must Have)&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Strong hands on programming experience with one or more general purpose languages, including Python, SQL, Java, Scala, PySpark, C, C++, C#, Swift/Objective C, or JavaScript. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Proven experience with data preparation, ingestion, and ETL/ELT frameworks, such as Airflow, dbt, Fivetran, Kafka, Informatica, Talend, Alteryx, or equivalent technologies. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong experience with software engineering best practices, including version control (Git, TFS, Subversion), CI/CD pipelines (Jenkins, Maven, Gradle, or similar), automated unit testing, and DevOps practices. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Hands on experience with cloud data platforms and storage technologies, such as Snowflake, Databricks, Amazon S3, Redshift, BigQuery, or equivalent platforms. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Demonstrated experience architecting and operating end to end data pipelines, using cloud based and/or on premises stacks. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Prior hands on experience as a data modeler is required, including dimensional modeling and analytical data model design. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong understanding of database management fundamentals, including schemas, tables, views, permissions, query performance, and operational troubleshooting. 
&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Proven ability to diagnose and resolve data quality issues at the engineering level, including logic errors, transformation issues, and source to target alignment. &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Leadership &amp;amp; Ways of Working&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; Proven ability to lead a technical team while remaining hands on. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong problem solving skills with a bias toward engineering-driven fixes. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Ability to define and enforce SLAs in a technical operations environment. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong stakeholder communication skills, especially in high impact data incidents. &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; PREFERRED QUALIFICATIONS&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; Experience supporting AI or analytics applications in production environments. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Experience operating data platforms in large scale or regulated enterprise environments. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Familiarity with ITIL aligned incident/problem management applied pragmatically within engineering teams. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Knowledge of cloud computing, machine learning, text analytics, NLP, and web based application architectures. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Knowledge of ontologies and graph databases (e. g. , Neo4j, Titan) and associated query languages is a plus. 
&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; and information from varied data sources, both new and pre-existing, into discernable insights and perspectives; takes a problem-solving approach by connecting analytical thinking with an understanding of business drivers&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Adaptable: &lt;/b&gt; Demonstrates flexibility in the face of shifting targets, thrives in new situations&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Pioneering:&lt;/b&gt; Pushes self and others to think about new innovation and digital frontiers and ways to conquer them&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Ambiguity Tolerant: &lt;/b&gt; Successfully navigates ambiguity to keep the organization on target and deliver against established timelines&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Exceptional Communicator:&lt;/b&gt; Can understand, translate, and distill the complex, technical findings of the team into commentary that facilitates effective decision making by senior leaders; can readily align interpersonal style with the individual needs of customers&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Highly Collaborative:&lt;/b&gt; Manages projects with and through others; shares responsibility and credit; develops self and others through teamwork; comfortable providing guidance and sharing expertise with others to help them develop their skills and perform at their best; helps others take appropriate risks; communicates frequently with team members earning respect and trust of the team &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Proactive Self-Starter&lt;/b&gt; : Takes an active role in one s own professional development; stays abreast of analytical trends, and cutting-edge applications of data&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Creative: &lt;/b&gt; Able to bring forth new ideas to improve our existing practices and takes calculated risks to innovate new 
capabilities within Business Analytics, with a focus on data products and analytics solutions&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; &lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; NON-STANDARD WORK SCHEDULE, TRAVEL OR ENVIRONMENT REQUIREMENTS&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; 20% travel may be required based on delivery and project priorities&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;br&gt; Work Location Assignment: Hybrid&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Information &amp;amp; Business Tech &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pfizer&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816895/manager-data-operations-engineering-at-pfizer/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816895/manager-data-operations-engineering-at-pfizer/</link>
  <title>[Full Time] Manager, Data Operations &amp; Engineering at Pfizer</title>
  <dc:date>2026-03-13T03:12:59+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816946/data-base-engineer-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;About the Role:&lt;/p&gt; &lt;p&gt;As a Database Engineer, you will be part of a dynamic team supporting the Tax Information Reporting Suite and Core Services data processing. In this role, you will design and optimize database solutions that power critical business applications. You ll work on data modeling, query optimization, performance tuning, and cross platform database development across RDBMS and NoSQL databases.&lt;/p&gt; &lt;p&gt;You will collaborate closely with cross functional teams to support application development, database deployments, and production troubleshooting. This role offers an excellent opportunity to deepen your expertise in database engineering, learn industry best practices, and build scalable, high performance data solutions in a fast paced environment.&lt;/p&gt; &lt;p&gt;What You Will Be Doing:&lt;/p&gt; &lt;p&gt; Design, develop, and optimize SQL queries, stored procedures, functions, and database objects across major RDBMS platforms (PostgreSQL, MySQL, Oracle, SQL Server, etc.).&lt;/p&gt; &lt;p&gt; Build and optimize SQL queries, data access layers, and backend processing workflows.&lt;/p&gt; &lt;p&gt; Investigate and resolve production issues, perform root cause analysis and implement long term fixes.&lt;/p&gt; &lt;p&gt; Develop, enhance, and maintain backend services within the TIRS platform.&lt;/p&gt; &lt;p&gt; Perform data modeling, schema design, normalization, and indexing to support application requirements and improve performance.&lt;/p&gt; &lt;p&gt; Troubleshoot and resolve database performance issues, query bottlenecks, and data integrity problems.&lt;/p&gt; &lt;p&gt; Work with Development teams to support database changes, deployments, and version control in development and production environments.&lt;/p&gt; &lt;p&gt; Utilize Unix/Linux commands for file handling, automation, logs, and running database tools/scripts.&lt;/p&gt; &lt;p&gt; Support integration 
efforts, automation initiatives, and backend performance improvements.&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Follow coding standards, documentation practices, and contribute to continuous improvement activities.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt;What You Bring:&lt;/p&gt; &lt;p&gt; Bachelor s degree in Computer Science, Information Technology, or a related field.&lt;/p&gt; &lt;p&gt; Strong expertise in SQL with the ability to write, optimize, and troubleshoot complex queries, stored procedures, functions, and database objects.&lt;/p&gt; &lt;p&gt; Hands-on experience with major RDBMS platforms such as PostgreSQL, MySQL, Oracle, and SQL Server.&lt;/p&gt; &lt;p&gt; Solid understanding of data modeling, normalization, schema design, and indexing strategies to support scalable and high performance applications.&lt;/p&gt; &lt;p&gt; Experience diagnosing and resolving database performance issues, including query tuning and troubleshooting bottlenecks.&lt;/p&gt; &lt;p&gt; Familiarity with working alongside development teams to support database changes, deployments, and version control processes across environments.&lt;/p&gt; &lt;p&gt; Proficiency with Unix/Linux commands for file handling, automation, log analysis, and executing database scripts/tools.&lt;/p&gt; &lt;p&gt; Strong analytical and problem solving skills with a proactive approach to learning new technologies and tools.&lt;/p&gt; &lt;p&gt; Ability to collaborate effectively within cross functional teams and contribute to a positive, growth driven work culture.&lt;/p&gt; &lt;p&gt; Self motivated, adaptable, and committed to continuous improvement.&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Bachelor s degree in Computer Science, Information Technology, or a related field.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; Design, develop, and optimize SQL queries, stored procedures, functions, and database objects across major RDBMS platforms (PostgreSQL, MySQL, Oracle, SQL Server, 
etc.).&lt;/p&gt; &lt;p&gt; Build and optimize SQL queries, data access layers, and backend processing workflows.&lt;/p&gt; &lt;p&gt; Investigate and resolve production issues, perform root cause analysis and implement long term fixes.&lt;/p&gt; &lt;p&gt; Develop, enhance, and maintain backend services within the TIRS platform.&lt;/p&gt; &lt;p&gt; Perform data modeling, schema design, normalization, and indexing to support application requirements and improve performance.&lt;/p&gt; &lt;p&gt; Troubleshoot and resolve database performance issues, query bottlenecks, and data integrity problems.&lt;/p&gt; &lt;p&gt; Work with Development teams to support database changes, deployments, and version control in development and production environments.&lt;/p&gt; &lt;p&gt; Utilize Unix/Linux commands for file handling, automation, logs, and running database tools/scripts.&lt;/p&gt; &lt;p&gt; Support integration efforts, automation initiatives, and backend performance improvements.&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Follow coding standards, documentation practices, and contribute to continuous improvement activities.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Miscellaneous&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816946/data-base-engineer-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816946/data-base-engineer-at-zensar/</link>
  <title>[Full Time] Data Base Engineer at Zensar</title>
  <dc:date>2026-03-13T00:34:14+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816908/eds-specialist-sppids3d-admin-at-worley/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; &lt;span&gt; We deliver the world s most complex projects. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Work as part of a collaborative and inclusive team. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Enjoy a varied &amp;amp; challenging role. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;br&gt; &lt;span&gt; &lt;strong&gt; Building on our past. Ready for the future &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; We partner with our customers to deliver projects and create value over the life of their portfolio of assets. We solve complex problems by finding integrated data-centric solutions from the first stages of consulting and engineering to installation and commissioning, to the last stages of decommissioning and remediation. Join us and help drive innovation and sustainability in our projects. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;span&gt; &lt;strong&gt; The Role &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; As an EDS Specialist- SPP&amp;amp;IDS3D Admin with Worley, &lt;/strong&gt; you will work closely with our existing team to deliver projects for our clients while continuing to develop your skills and experience etc. &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Responsible for project set up, maintenance and support of the system. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Maintain the P&amp;amp;ID database for stability and maintenance. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Shall ensure the SPPID environment works efficiently. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Setup and management of new and existing engineering projects in standalone and workshare mode. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; SPPIDS3D Administration, setup, archive, workshare and replication. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Troubleshooting SPPID software issue and providing user-support to project resources &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Application database maintenance activities. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Continually seeking opportunities to increase end-user satisfaction. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; SPPIDS3D Customization required for project deliverables. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; SPPID S3DReport customization &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Implement automation tools that will benefit the project deliverables. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Utilize SPPID reporting to assist monitoring design quality and progress. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Prepare Project specific job notes and provide application trainings for designers and Admin. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Gain a full understanding of the scope, overall schedule, deliverables, milestones and coordination procedure. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Coordination with other discipline team leads. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Setup and support for SPPID integration with SmartPlant Foundation, SmartPlant 3D, SmartPlant Instrumentation and SmartPlant Electrical. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Help to create data flow between SPPID and other SmartPlant applications. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; About You &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; To be considered for this role it is envisaged you will possess the following attributes: &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Bachelor s / Diploma degree in Engineering. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; 7+ years of relevant experience of SmartPlant 2D Administration. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience in conducting applications training and user support. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Knowledge of Engineering workflow in an EPC environment. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong analytical and problem-solving skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to work in a fast-paced environment. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Effective oral and written communication skills required &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Having knowledge of SmartPlant Instrumentation &amp;amp; SmartPlant Electrical - Basic will be added advantage. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Must have &lt;/strong&gt; &lt;/span&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Knowledge of writing SQL Queries to extract data from SPPIDS3D. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Creating automation commands using Visual basic .Net in Smart3D and SPP&amp;amp;ID. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Programming skill in C#, VB.net. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience in Oil and Gas/Chemical/Petrochemical. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; We want our people to be energized and empowered to drive sustainable impact. So, our focus is on a values-inspired culture that unlocks brilliance through belonging, connection and innovation. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; We re building a diverse, inclusive and respectful workplace. Creating a space where everyone feels they belong, can be themselves, and are heard. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Worley takes personal data protection seriously and respects EU and local data protection laws. You can read our full Recruitment Privacy Notice Here. 
&lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Please note: If you are being represented by a recruitment agency you will not be considered, to be considered you will need to apply directly to Worley. &lt;/span&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Oil &amp;amp; Gas&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Worley&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816908/eds-specialist-sppids3d-admin-at-worley/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816908/eds-specialist-sppids3d-admin-at-worley/</link>
  <title>[Full Time] Eds Specialist- Spp&amp;ids3d Admin at Worley</title>
  <dc:date>2026-03-13T00:30:50+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816938/database-administrator-lead-at-idexcel/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;b&gt;Roles and Responsibilities :&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Design, develop, and maintain large-scale databases on AWS using Amazon RDS, PostgreSQL, MySQL, and SQL Server.&lt;/li&gt;&lt;li&gt;Lead database migration projects from on-premise to cloud-based infrastructure using shell scripting languages like Bash or Python.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to identify business requirements and design scalable database architectures that meet those needs.&lt;/li&gt;&lt;li&gt;Develop automated testing frameworks for database migrations and deployments using tools like Jenkins or GitLab CI/CD.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Job Requirements :&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;7-16 years of experience in database administration with expertise in PostgreSQL administration.&lt;/li&gt;&lt;li&gt;Strong understanding of AWS services such as S3, RDS, Lambda functions etc., including data modeling and schema design.&lt;/li&gt;&lt;li&gt;Proficiency in developing complex queries using various programming languages (e.g., Python).&lt;/li&gt;&lt;li&gt;Experience with database development best practices including normalization techniques.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Idexcel&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816938/database-administrator-lead-at-idexcel/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816938/database-administrator-lead-at-idexcel/</link>
  <title>[Full Time] Database Administrator Lead at Idexcel</title>
  <dc:date>2026-03-13T00:05:21+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816862/lead-azure-data-engineer-at-hdfc-bank/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Job Role&lt;/strong&gt; - Lead Azure Data Engineer&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Grade &lt;/strong&gt;- Asst VP&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience &lt;/strong&gt;- 11 - 15 Years&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location &lt;/strong&gt;- Gurugram, Noida, Navi Mumbai &amp;amp; Bangalore &lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;em&gt;Preferable candidates who can join immediate, 30 Days &amp;amp; 45 Days notice period &lt;/em&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt; &lt;strong&gt;Job Responsibilities :&amp;nbsp;&lt;/strong&gt;&lt;br&gt;&lt;/p&gt;&lt;ol type=&quot;1&quot;&gt;&lt;li&gt;Fundamentals of DevOps, DevSecOps, CD / CI Pipeline using ADO&lt;/li&gt;&lt;li&gt;Good understanding of MPP Architecture, MySQL, RDS, MS&lt;/li&gt;&lt;li&gt;SQL DB, Oracle ,Postgres DB&lt;/li&gt;&lt;li&gt;Would need to interact with Software Integrators on a day-today basis.&lt;/li&gt;&lt;li&gt;Deployment and testing skills&lt;/li&gt;&lt;li&gt;Strong communication skills&lt;/li&gt;&lt;li&gt;ELT - Trino, Azure&amp;nbsp;Data&amp;nbsp;factory, Azure&amp;nbsp;Databricks, PySpark, Python, Iceberg, Parquet&lt;/li&gt;&lt;li&gt;CDC Tool like Qlik/ Golden Gate/Dbsium/IBM CDC, Kafka/ Solace  Scripting Shell, Python, Java,&lt;/li&gt;&lt;li&gt;Good Understanding of Azure Cloud&amp;nbsp;Engineering ADLS, Iceberg,&amp;nbsp;Databricks, AKS, RHEL&lt;/li&gt;&lt;li&gt;Good understanding of MS Project&lt;/li&gt;&lt;li&gt;Development skill using Trino, PySpark and&amp;nbsp;Databricks&lt;/li&gt;&lt;li&gt;Understanding of security basics, Encryption/Decryption,&lt;/li&gt;&lt;li&gt;Understanding of IT hardware basics: Unix/Windows servers, RAM/CPU utilization, storage on cloud&lt;/li&gt;&lt;li&gt;Basic project management skills for preparation of a high-level project plan.&lt;/li&gt;&lt;li&gt;Understanding of DNS and Load Balancing, and their use.&lt;/li&gt;&lt;li&gt;&amp;nbsp;Understanding of DR/BCP/Recovery/Backup conceptually for DB and Apply 
Servers&lt;/li&gt;&lt;/ol&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Hdfc Bank&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816862/lead-azure-data-engineer-at-hdfc-bank/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816862/lead-azure-data-engineer-at-hdfc-bank/</link>
  <title>[Full Time] Lead Azure Data Engineer at Hdfc Bank</title>
  <dc:date>Thu, 12 Mar 2026 22:53:27 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816999/sr-analyst-iii-erp-package-applications-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; 5+ years of experience as an Apps DBA &amp;amp; Database Administrator. &lt;/p&gt; &lt;p&gt; Strong working experience with Oracle (11g/12c/19c) and Oracle EBS (R12 preferred). &lt;/p&gt; &lt;p&gt; Familiarity with SQL Server Oracle database administration and backup strategies. &lt;/p&gt; &lt;p&gt; Proficient in SQL, PL/SQL, and query performance optimization. &lt;/p&gt; &lt;p&gt; comfortable working in Linux/Unix server environments and automating with shell scripts. &lt;/p&gt; &lt;p&gt; Understanding of ITIL practices and change/release management workflows. &lt;/p&gt; &lt;p&gt; Oracle EBS application configuration. &lt;br&gt; Starting and stopping the application and Database &lt;br&gt; Monitoring the application related products &lt;br&gt; Establish and maintain backup and recovery policies and procedures. &lt;br&gt; Take care of the Database design and implementation. &lt;br&gt; Implement and maintain database security &lt;br&gt; Perform general technical troubleshooting &lt;br&gt; Interface with Third Party EBS service provider as necessary for technical support. &lt;br&gt; Patch Management and Version Control &lt;br&gt; Perform database tuning and performance monitoring. &lt;br&gt; Perform application tuning and performance monitoring &lt;br&gt; Cloning development and Test from production environment. 
&lt;/p&gt; &lt;p&gt; &lt;/p&gt; &lt;p&gt; &lt;/p&gt; &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816999/sr-analyst-iii-erp-package-applications-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816999/sr-analyst-iii-erp-package-applications-at-dxc-technology/</link>
  <title>[Full Time] Sr Analyst Iii Erp Package Applications at DXC Technology</title>
  <dc:date>Thu, 12 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816237/power-bi-developer-mid-level-at-infosys/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt;Educational Requirements&lt;/strong&gt; &lt;div&gt;Bachelor Of Technology,Bachelor of Engineering &lt;/div&gt; &lt;strong&gt;Service Line&lt;/strong&gt; &lt;div&gt; Enterprise Package Application Services&lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt;Responsibilities&lt;/strong&gt; &lt;div&gt; A day in the life of an Infoscion&lt;li&gt;As part of the Infosys consulting team, your primary role would be to get to the heart of customer issues, diagnose problem areas, design innovative solutions and facilitate deployment resulting in client delight.&lt;/li&gt;&lt;li&gt;You will develop a proposal by owning parts of the proposal document and by giving inputs in solution design based on areas of expertise. &lt;/li&gt;&lt;li&gt;You will plan the activities of configuration, configure the product as per the design, conduct conference room pilots and will assist in resolving any queries related to requirements and solution design&lt;/li&gt;&lt;li&gt;You will conduct solution/product demonstrations, POC/Proof of Technology workshops and prepare effort estimates which suit the customer budgetary requirements and are in line with organization's financial guidelines &lt;/li&gt;&lt;li&gt;Actively lead small projects and contribute to unit-level and organizational initiatives with an objective of providing high quality value adding solutions to customers. 
If you think you fit right in to help our clients navigate their next in their digital transformation journey, this is the place for you!&lt;/li&gt;&lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;strong&gt;Additional Responsibilities:&lt;/strong&gt; &lt;div&gt; &lt;li&gt;Strong communication and interpersonal skills, with the ability to work effectively in a collaborative team environment&lt;/li&gt;&lt;li&gt;Certification in Power BI (e.g., Microsoft Certified: Data Analyst Associate) is a plus&lt;/li&gt;&lt;li&gt;Understanding of cloud platforms (Azure preferred) and experience with Azure services related to Power BI (e.g., Azure SQL Database, Azure Data Factory). Location of posting - Infosys Ltd. is committed to ensuring you have the best experience throughout your journey with us. We currently have open positions in a number of locations across India - Bangalore, Pune, Hyderabad, Mysore, Kolkata, Chennai, Chandigarh, Trivandrum, Indore, Nagpur, Mangalore, Noida, Bhubaneswar, Coimbatore, Jaipur, Hubli, Vizag. While we work in accordance with business requirements, we shall strive to offer you the location of your choice, where possible. &lt;/li&gt;&lt;/div&gt; &lt;strong&gt;Technical and Professional Requirements:&lt;/strong&gt; &lt;div&gt; &lt;li&gt;Minimum 5 years of experience as Power BI Developer and Capacity Administrator, with demonstrated track record of successful project delivery&lt;/li&gt;&lt;li&gt;Monitor and manage Power BI service capacity to ensure optimal performance&lt;/li&gt;&lt;li&gt;Proactively identify and resolve capacity-related bottlenecks&lt;/li&gt;&lt;li&gt;Plan and execute capacity scaling based on business growth and usage patterns&lt;/li&gt;&lt;li&gt;Configure and optimize Power BI workspaces, gateways, and dataflows&lt;/li&gt;&lt;li&gt;Perform complex technical task e.g. 
integration with R/Python&lt;/li&gt;&lt;li&gt;Understanding of advanced calculations and DAX functions&lt;/li&gt;&lt;li&gt;Understanding of SQL, relational database management system, data modeling, and normalization&lt;/li&gt;&lt;li&gt;Implement security policies, maintain compliance with internal data governance policies and access controls to safeguard sensitive data within Power BI&lt;/li&gt;&lt;li&gt;Educate users on best practices for data visualization, governance, and self-service analytics&lt;/li&gt;&lt;li&gt;Maintain complex Power BI reports for scalability, manageability, performance, and re-use &lt;/li&gt;&lt;/div&gt; &lt;strong&gt;Preferred Skills:&lt;/strong&gt; &lt;div&gt; &lt;div&gt; Technology-&amp;gt;Business Intelligence - Reporting-&amp;gt;Actuate &lt;/div&gt; &lt;div&gt; Technology-&amp;gt;Enterprise Package Processes-&amp;gt;SAP Business Intelligence Implementation Methodology &lt;/div&gt; &lt;div&gt; Technology-&amp;gt;SAP Technical-&amp;gt;SAP Business Intelligence &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infosys&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816237/power-bi-developer-mid-level-at-infosys/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816237/power-bi-developer-mid-level-at-infosys/</link>
  <title>[Full Time] Power BI Developer (Mid-Level) at Infosys</title>
  <dc:date>Wed, 11 Mar 2026 17:59:06 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815870/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Informatica PowerCenter&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will be responsible for designing, building, and configuring applications to meet business process and application requirements. You will play a crucial role in developing solutions that align with organizational goals and objectives. Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to analyze business requirements and translate them into technical solutions.&lt;/li&gt;&lt;li&gt;Develop and maintain high-quality software design and architecture.&lt;/li&gt;&lt;li&gt;Implement best practices for software development and ensure code quality.&lt;/li&gt;&lt;li&gt;Troubleshoot and debug applications to optimize performance.&lt;/li&gt;&lt;li&gt;Stay updated on emerging technologies and trends in the software development industry. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Informatica PowerCenter.&lt;/li&gt;&lt;li&gt;Strong understanding of ETL processes and data integration.&lt;/li&gt;&lt;li&gt;Experience with SQL and relational databases.&lt;/li&gt;&lt;li&gt;Knowledge of data warehousing concepts and methodologies.&lt;/li&gt;&lt;li&gt;Hands-on experience in designing and implementing data migration strategies. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have a minimum of 3 years of experience in Informatica PowerCenter.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full-time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815870/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815870/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Wed, 11 Mar 2026 16:34:06 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815842/application-developer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Application Developer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements.&lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Google BigQuery&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Teradata BI&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with various teams to understand their needs, developing solutions that align with business objectives, and ensuring that applications are optimized for performance and usability. You will also engage in problem-solving activities, providing support and enhancements to existing applications while ensuring that all development aligns with best practices and organizational standards. &lt;br&gt;Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and ensure timely delivery of application features. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Google BigQuery.&lt;/li&gt;&lt;li&gt;Good To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Teradata BI.&lt;/li&gt;&lt;li&gt;Strong understanding of data warehousing concepts and ETL processes.&lt;/li&gt;&lt;li&gt;Experience in SQL and database management.&lt;/li&gt;&lt;li&gt;Familiarity with cloud computing platforms and services. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in Google BigQuery.&lt;/li&gt;&lt;li&gt;This position is based in Mumbai.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815842/application-developer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815842/application-developer-at-accenture/</link>
  <title>[Full Time] Application Developer at Accenture</title>
  <dc:date>Wed, 11 Mar 2026 16:30:47 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815918/application-support-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Application Support Engineer&lt;b&gt;Project Role Description :&lt;/b&gt;Act as software detectives, provide a dynamic service identifying and solving issues within multiple components of critical business systems. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Electronic Medical Records (EMR)&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;2&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As an Application Support Engineer, you will act as a software detective, providing a dynamic service that identifies and resolves issues within various components of critical business systems. Your typical day will involve collaborating with team members to troubleshoot software problems, analyzing system performance, and ensuring that applications run smoothly to support business operations effectively. You will engage with users to understand their challenges and work towards implementing solutions that enhance system functionality and user experience. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Assist in the documentation of processes and solutions to enhance team knowledge.&lt;/li&gt;&lt;li&gt;Engage with stakeholders to gather requirements and feedback for continuous improvement. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Electronic Medical Records (EMR).&lt;/li&gt;&lt;li&gt;Strong understanding of software troubleshooting techniques.&lt;/li&gt;&lt;li&gt;Experience with system performance monitoring tools.&lt;/li&gt;&lt;li&gt;Familiarity with database management and query optimization.&lt;/li&gt;&lt;li&gt;Ability to communicate technical information effectively to non-technical users. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 2 years of experience in Electronic Medical Records (EMR).&lt;/li&gt;&lt;li&gt;This position is based at our Chennai office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815918/application-support-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815918/application-support-engineer-at-accenture/</link>
  <title>[Full Time] Application Support Engineer at Accenture</title>
  <dc:date>Wed, 11 Mar 2026 15:16:02 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816294/etl-developer-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;Responsibilities include translating requirements and data mapping documents into technical designs developing enhancing and maintaining code following best practices and standards supporting regression and system testing efforts debugging and resolving issues found during testing or production communicating status issues and blockers with the project team and supporting continuous improvement by identifying and addressing opportunities.&lt;/li&gt;&lt;li&gt;Basic qualifications include a Bachelors degree or military experience in a related field preferably computer science with three to five years of ETL development experience in a data warehouse environment deep understanding of enterprise data warehousing best practices and standards strong software engineering experience designing developing and operating robust highly scalable cloud infrastructure services strong experience with Python PySpark DataStage ETL and SQL development proven experience with cloud infrastructure projects including hands on migration to public cloud platforms such as AWS and Azure preferably Snowflake knowledge of cybersecurity practices operations risk management processes architectural requirements and threats and vulnerabilities including incident response methodologies strong communication and interpersonal skills and strong organizational skills with the ability to work independently and collaboratively.&lt;/li&gt;&lt;li&gt;Preferred qualifications include AWS Certified Solutions Architect Associate AWS Certified DevOps Engineer Professional or AWS Certified Solutions Architect Professional experience defining future state roadmaps for data warehouse applications experience leading development teams experience in the financial services or banking industry.&lt;/li&gt;&lt;li&gt;Core skills include ETL data warehouse concepts Snowflake CI CD tools such as Jenkins and GitHub Python and 
DataStage.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816294/etl-developer-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816294/etl-developer-at-virtusa/</link>
  <title>[Full Time] ETL Developer at Virtusa</title>
  <dc:date>Wed, 11 Mar 2026 12:17:23 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815784/app-dev-support-engineer-iii-at-conduent/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Responsibilities&lt;/b&gt; &lt;br&gt; &lt;li&gt;Design and develop highly scalable web-based applications based on business needs. &lt;/li&gt; &lt;li&gt;Design and customize software for client use with the aim of optimizing operational efficiency. &lt;/li&gt; &lt;li&gt;A deep understanding of, and ability to use and explain all aspects of application integration in .NET and data integration with SQL Server and associated technologies and standards &lt;/li&gt; &lt;li&gt;Strong background in building and operating SAAS platforms using the Microsoft technology stack with modern services-based architectures. &lt;/li&gt; &lt;li&gt;Ability to recommend and configure Azure subscriptions and establish connectivity &lt;/li&gt; &lt;li&gt;Work with IT teams to setup new application architecture requirements &lt;/li&gt; &lt;li&gt;Coordinate releases with Quality Assurance Team and implement SDLC workflows and better source code integration. &lt;/li&gt; &lt;li&gt;Implement build process and continuous build integration with Unit Testing framework. &lt;/li&gt; &lt;li&gt;Develop and maintain a thorough understanding of business needs from both technical and business perspectives &lt;/li&gt; &lt;li&gt;Assist and mentor junior team members to enforce development guidelines. &lt;/li&gt; &lt;li&gt;Take technical ownership of products and provide support with quick turnaround. &lt;/li&gt; &lt;li&gt;Effectively prioritize and execute tasks in a high-pressure environment &lt;/li&gt;&lt;b&gt; Qualifications / Experience&lt;/b&gt; &lt;br&gt; &lt;li&gt;Bachelor's/Master's degree in computer science / computer engineering &lt;/li&gt; &lt;li&gt;Minimum of 6+ years' experience in building enterprise scale windows and web application using Microsoft .NET technologies. 
&lt;/li&gt; &lt;li&gt;5+ years of experience in C#, ASP.NET MVC and .NET Core Web API &lt;/li&gt; &lt;li&gt;1+ years of experience in Angular 2 or higher &lt;/li&gt; &lt;li&gt;Experience in any of the following are also desirable: Bootstrap, Knockout, entity framework, nhibernate, Subversion, Linq, Asynchronous Module Definition (such as requirejs) &lt;/li&gt; &lt;li&gt;In depth knowledge on design patterns and unit testing frameworks. &lt;/li&gt; &lt;li&gt;Experience with Agile application development. &lt;/li&gt; &lt;li&gt;SQL Server development, performance tuning (SQL Server 2014/2016) and troubleshooting &lt;/li&gt; &lt;li&gt;Ability to work with a sense of urgency and attention to detail &lt;/li&gt; &lt;li&gt;Excellent oral and written communication skills. &lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;BPM / BPO&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Conduent&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815784/app-dev-support-engineer-iii-at-conduent/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815784/app-dev-support-engineer-iii-at-conduent/</link>
  <title>[Full Time] App Dev &amp;amp; Support Engineer III at Conduent</title>
  <dc:date>Wed, 11 Mar 2026 12:14:53 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816967/gw-developer-_datahub-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;Must have skills &lt;/div&gt; &lt;ul&gt; &lt;li&gt;6-9 years of Experience in P&amp;amp;C Insurance on Guidewire DataHub/InfoCenter Platform.&lt;/li&gt; &lt;li&gt;Specialist Certifications on DHIC.&lt;/li&gt; &lt;li&gt;Experience on SQL Server databases.&lt;/li&gt; &lt;li&gt;Able to create PL/SQL stored procedures. &lt;/li&gt; &lt;li&gt;Experience on Guidewire ClaimCenter/ PolicyCenter/ BillingCenter data models. &lt;/li&gt; &lt;li&gt;Experience on SAP BODS ETL design &amp;amp; Administration.&lt;/li&gt; &lt;li&gt;Experience on Data Warehousing that includes analysis and development of Dataflows, mappings using needed transformations using BODS.&lt;/li&gt; &lt;li&gt;Experience in mapping Guidewire Insurance Suite of products (PC/BC/CC/CM) to DHIC.&lt;/li&gt; &lt;li&gt;Works with business in identifying detailed analytical and operational reporting/extracts requirements.&lt;/li&gt; &lt;li&gt;Experience building downstream extracts using DHIC.&lt;/li&gt; &lt;li&gt; Able to create Microsoft SQL / ETL complex queries.&lt;/li&gt; &lt;li&gt;Experience on Updating Data Specifications&lt;/li&gt; &lt;li&gt;Experience on DataHub and InfoCenter Initial loads and Delta loads.&lt;/li&gt; &lt;li&gt;Experience on DataHub and InfoCenter Guidewire Commit and Rollback utility.&lt;/li&gt; &lt;li&gt;Experience on Extending entities &amp;amp; attributes in DataHub and InfoCenter&lt;/li&gt; &lt;li&gt;Experience on Ref &amp;amp; Config Data spreadsheet maintenance.&lt;/li&gt; &lt;li&gt;Experience on Scheduling loads, solving integrity check issues, and balancing errors.&lt;/li&gt; &lt;li&gt;Experience on performance improvements on SAP BODS workflows especially on SQL database as source.&lt;/li&gt; &lt;li&gt;Works with business in identifying detailed analytical and operational reporting/extracts requirements.&lt;/li&gt; &lt;li&gt;Must have at least one DHIC on-premises to DHIC Cloud Upgrade experience &lt;/li&gt; &lt;/ul&gt; 
&lt;div&gt; &lt;/div&gt; &lt;div&gt;Good to have Skills:&lt;/div&gt; &lt;ul&gt; &lt;li&gt;SAP BODS administration, CICD.&lt;/li&gt; &lt;li&gt;Data Modelling Experience.&lt;/li&gt; &lt;/ul&gt; &lt;div&gt;Mentioned above.&lt;/div&gt; &lt;div&gt;Mentioned above.&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816967/gw-developer-_datahub-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816967/gw-developer-_datahub-at-zensar/</link>
  <title>[Full Time] Gw- Developer _datahub at Zensar</title>
  <dc:date>Wed, 11 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816884/vp-chief-ai-architect-at-pfizer/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; ROLE SUMMARY &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Pfizer is seeking a Vice President, Chief AI Architect to define and steward the enterprise AI architecture vision, roadmap, and governance that enable breakthroughs at scale across R&amp;amp;D, Manufacturing, and Commercial. This role brings integrated, end &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; to &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; end thinking across data, models, platforms, and products; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; curates &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; innovation from a strong external network; and ensures secure, reliable, cost &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; effective patterns for AI solutions (including LLMs and agentic systems) in a regulated environment. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; The Chief AI Architect partners closely with the Head of AI &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; CoE &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; (who builds and &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; operates &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; our AI &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; platforms) to ensure that reference architectures, standards, and guardrails are translated into scalable, reusable capabilities. Together, they accelerate adoption, improve reliability and time &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; to &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; value, and uphold Responsible AI principles. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; &lt;br&gt; ROLE RESPONSIBILITIES &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Enterprise AI Architecture Vision &amp;amp; Strategy &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Define the target &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; state AI architecture (data, model, application, and infrastructure layers) that integrates advanced analytics, ML/LLM, knowledge/semantic technologies, and operational systems. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Establish the North Star for foundational capabilities: RAG and retrieval pipelines, agents/orchestration, vector search, feature stores, model registries, observability, evaluation, safety layers &lt;/span&gt; &lt;span&gt; , etc. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Set architecture principles that balance innovation speed with compliance, reliability, and total cost of ownership. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; R &lt;/span&gt; &lt;span&gt; eference Architectures, Patterns &amp;amp; Standards &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Publish reference architectures and blueprints for priority use cases (e.g., scientific discovery assistants, &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; GxP &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; impacted automation, manufacturing QA, field engagement copilots). &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Define &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; LLMOps &lt;/span&gt; &lt;span&gt; / &lt;/span&gt; &lt;span&gt; MLOps &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; standards (model lifecycle, evaluation, red &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; teaming, monitoring, rollback, drift, lineage, documentation). 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Codify security, privacy, and Responsible AI guardrails: data minimization, isolation patterns, PII/PHI handling, human &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; in &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; the &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; loop, explainability, auditability, model risk controls. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Roadmap &amp;amp; Architecture Governance &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Own the enterprise AI architecture roadmap; align with business strategy and portfolio funding. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Chair an AI Architecture Review Board (AARB) and design authorities that provide fast, pragmatic guidance and approvals. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Manage technology lifecycle (emerging adopt scale retire) for AI frameworks, model classes, toolchains, and platforms. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Innovation Scouting &amp;amp; External Ecosystem &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Maintain a strong external network ( &lt;/span&gt; &lt;span&gt; hyperscalers &lt;/span&gt; &lt;span&gt; , model labs, hardware vendors, startups, academia, standards bodies) to scout, evaluate, and curate innovations. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Run evidence &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; based proofs &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; of &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; value and bake successful patterns into the reference stack; shape build/partner/buy decisions with the Head of AI &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; CoE &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; and Procurement. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Represent Pfizer s interests in industry consortia and standards discussions; encourage selective open &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; source contribution where it &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; benefits &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; the enterprise. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Partnership with the Head of AI &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; CoE &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; (Operating Model) &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; You set the blueprint; the &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; CoE &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; builds/ &lt;/span&gt; &lt;span&gt; operates &lt;/span&gt; &lt;span&gt; . Co &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; own the platform backlog prioritization and ensure reference patterns &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; productized capabilities. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Define SLAs/SLOs, performance benchmarks, and &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; cost &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; guardrails in collaboration with the &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; CoE &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; and SRE/FinOps. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Jointly drive developer enablement: SDKs, templates, golden paths, sandboxes, and documentation. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Risk, Compliance &amp;amp; Validation by Design &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Embed model risk management, validation evidence, and audit &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; ready documentation into patterns &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; fit for &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; GxP &lt;/span&gt; &lt;span&gt; , 21 CFR Part 11, GDPR/HIPAA contexts as applicable. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Institutionalize AI safety: pre &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; production evaluations, content safety, adversarial testing, policy enforcement, incident response playbooks. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Interoperability &amp;amp; Reuse &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Promote API &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; first and event &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; driven integration between AI services and enterprise systems; enable semantic/knowledge layers to unify context across domains. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Maximize reuse via shared components (prompt libraries, evaluation suites, connectors, datasets, ontologies), tracked through measurable reuse rates. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Performance, Capacity &amp;amp; Cost Engineering &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Set performance engineering practices for training, fine &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; tuning, and inference (e.g., quantization, distillation, caching, batching). 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Partner with Infra/Cloud/HPC on capacity planning (GPU/accelerator &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; utilization &lt;/span&gt; &lt;span&gt; ), autoscaling, and cost/per &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; inference optimization. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Talent, Community &amp;amp; Enablement &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Build an AI Architecture Guild that mentors domain architects and product teams. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Develop playbooks, training, and office hours to raise architectural quality and speed across the enterprise. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; QUALIFICATIONS &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Basic Qualifications &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; BS/BA degree required, higher degree preferred or relevant experience, 15+ years in architecture or advanced engineering leadership, with 7+ years designing AI/ML platforms and solutions at enterprise scale. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Demonstrated mastery across LLMs/foundation models, retrieval/RAG, agents/orchestration, evaluation, model safety, and &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; LLMOps &lt;/span&gt; &lt;span&gt; / &lt;/span&gt; &lt;span&gt; MLOps &lt;/span&gt; &lt;span&gt; . &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Deep experience in regulated environments (life sciences/healthcare or equivalent), including validation, auditability, and documentation rigor. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Proven ability to create reference architectures and standards and drive adoption through governance that enables speed (not bureaucracy). 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Strong external network and &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; a track record &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; of curating innovation (ecosystem scouting, &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; PoVs &lt;/span&gt; &lt;span&gt; , build/partner/buy). &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Hands &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; on credibility with modern stacks: vector databases, feature stores, model registries, observability, event &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; driven and API &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; first integration, cloud/HPC, and performance engineering for training and inference. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Exceptional influence and storytelling skills; able to align senior stakeholders and simplify complex trade &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; offs. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Preferred &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;b&gt; Qualifications &lt;/b&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Prior leadership of enterprise or domain architecture for AI &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; heavy portfolios. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Contributions to open &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; source, standards, reference implementations, or published thought leadership. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Familiarity with data mesh/semantic layers/knowledge graphs, and FinOps/SRE practices for AI platforms. 
&lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pfizer&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816884/vp-chief-ai-architect-at-pfizer/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816884/vp-chief-ai-architect-at-pfizer/</link>
  <title>[Full Time] VP, Chief AI Architect at Pfizer</title>
  <dc:date>2026-03-11T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817014/sr-analyst-i-software-engineering-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt;Job Description:&lt;/b&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt;Required &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt;Bachelors degree in Computer Science/Information Technology or related field&lt;/li&gt; &lt;li&gt;8+ years of experience and knowledge in .NET/.Net Core Framework (ASP.NET, v3.1+, v6.x preferred), &lt;b&gt;C#, Web API, .Net Core, AWS&lt;/b&gt; and SQL/PostgreSQL development.&lt;/li&gt; &lt;li&gt;8+ years of experience in design patterns and enterprise standards in application design&lt;/li&gt; &lt;li&gt;5+ years of experience on relational database design and development&lt;/li&gt; &lt;li&gt;Lead team in plan, design, develop and launch of systems and solutions&lt;/li&gt; &lt;li&gt;Work closely with your team members&lt;/li&gt; &lt;li&gt;Perform peer reviews for the developed code before delivering to client&lt;/li&gt; &lt;li&gt;Mentoring technical team on development and coding standards&lt;/li&gt; &lt;li&gt;Business Acumen&lt;/li&gt; &lt;li&gt;Creativity &amp;amp; Innovation&lt;/li&gt; &lt;li&gt;Persuasion&lt;/li&gt; &lt;li&gt;&quot;Big Picture&quot; Thinker&lt;/li&gt; &lt;li&gt;Strategic&lt;/li&gt; &lt;li&gt;Effective Communication (verbal + written)&lt;/li&gt; &lt;li&gt;Understands Technology systems &amp;amp; applications&lt;/li&gt; &lt;li&gt;Exceptional at coding and on time delivery of quality components and or applications&lt;/li&gt; &lt;li&gt;Exceptional at component and unit testing of following standard practices and methodologies&lt;/li&gt; &lt;li&gt;Exceptional utilizing the technologies and domain knowledge with the delivery of developed components or integrated components&lt;/li&gt; &lt;li&gt;Exceptional at working problems of moderate scope where analysis of situations or data requires review of a variety of factors&lt;/li&gt; &lt;li&gt;Exceptional at triage or analysis of situations for production support&lt;/li&gt; &lt;li&gt;Excel with on time delivery with 
minimal supervision&lt;/li&gt; &lt;li&gt;Effective verbal and written communicator&lt;/li&gt; &lt;li&gt;Effective participant of requirements gathering, requirements analysis&lt;/li&gt; &lt;li&gt;Must be fluent in English (written and spoken)&lt;/li&gt; &lt;li&gt;Successful completion of interview required to meet job qualification&lt;/li&gt; &lt;li&gt;Reliable, punctual attendance is an essential function of the position&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;b&gt;Preferred&lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt;AWS experience and/or Certification&lt;/li&gt; &lt;li&gt;NoSQL experience&lt;/li&gt; &lt;li&gt;Familiarity with code repositories like TFS, GitHub&lt;/li&gt; &lt;li&gt;Airline Crew Systems development is a PLUS&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt;At DXC Technology, we believe strong connections and community are key to our success. Our work model prioritizes in-person collaboration while offering flexibility to support wellbeing, productivity, individual work styles, and life circumstances. We re committed to fostering an inclusive environment where everyone can thrive.&lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt;Recruitment fraud is a scheme in which fictitious job opportunities are offered to job seekers typically through online services, such as false websites, or through unsolicited emails claiming to be from the company. These emails may request recipients to provide personal information or to make payments as part of their illegitimate recruiting process. DXC does not make offers of employment via social media networks and DXC never asks for any money or payments from applicants at any point in the recruitment process, nor ask a job seeker to purchase IT or other equipment on our behalf. 
More information on employment scams is available&lt;/span&gt; &lt;/b&gt; &lt;span&gt;here&lt;/span&gt; &lt;span&gt; &lt;b&gt; &lt;i&gt;.&lt;/i&gt; &lt;/b&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/817014/sr-analyst-i-software-engineering-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817014/sr-analyst-i-software-engineering-at-dxc-technology/</link>
  <title>[Full Time] Sr Analyst I Software Engineering at DXC Technology</title>
  <dc:date>2026-03-11T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815516/data-architect-at-barclays/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Join us as a &lt;b&gt;Data Architect &lt;/b&gt;Barclays, responsible for supporting the successful delivery of Location Strategy projects to plan, budget, agreed quality and governance standards. Youll spearhead the evolution of our digital landscape, driving innovation and excellence. You will harness cutting-edge technology to revolutionise our digital offerings, ensuring unparalleled customer experiences.&lt;p&gt;&lt;/p&gt; &lt;p&gt;To be successful as a &lt;b&gt;Data Architect &lt;/b&gt;you should have experience with: -&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Design and govern end to end data architectures, covering ingestion, processing, storage, and analytics.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Define lakehouse and distributed data processing architectures leveraging Apache Spark/Airflow on AWS (e.g. EMR / Glue).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Establish logical and physical data models optimised for analytics and large scale data processing.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Ensure architectural alignment with enterprise data standards and cloud best practices.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Partner with data engineering teams to define Spark based data processing patterns for batch and large scale transformations.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Guide development of Python based data pipelines for ingestion, transformation, and orchestration.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Support optimisation of SQL based analytics, including complex queries, performance tuning, and consumption layer design.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Bachelor s degree in computer science, Engineering, or related discipline.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;AWS certifications preferred.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong experience designing Cloud based data platforms in enterprise environments.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Hands 
on experience with Apache Spark, Airflow for large scale data processing and transformation.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong programming experience in Python for data engineering and pipeline development.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Advanced SQL skills, including:&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Analytical queries.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Performance optimization.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Data validation and reconciliation.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Solid understanding of data modelling, distributed data processing, and analytics workloads.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Some other highly valued skills may include: -&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Deep knowledge of AWS data services.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;S3, Glue, Athena, Redshift, EMR, Lake Formation.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience designing batch and analytical processing architectures using Spark and SQL.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Good to have Astronomer and Databricks knowledge.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Understanding of data ingestion, hydration, curation, and consumption patterns.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Proven experience in enterprise data architecture and governance.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Strong understanding of data quality, lineage, metadata, and security controls.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience working in regulated environments (banking / financial services preferred).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Ability to translate business requirements into scalable Spark / SQL based data solutions.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong stakeholder communication and architecture documentation skills.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Comfortable operating across architecture, engineering, and 
delivery teams.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;You may be assessed on the key critical skills relevant for success in role, such as risk and controls, change and transformation, business acumen strategic thinking and digital and technology, as well as job-specific technical skills.&lt;/p&gt; &lt;p&gt;This role is based in Pune.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Purpose of the role&lt;/b&gt; &lt;/p&gt; &lt;p&gt;To design, develop, and implement solutions to complex business problems, collaborating with stakeholders to understand their needs and requirements, and design and implement solutions that meet those needs and create solutions that balance technology risks against business delivery, driving consistency. &lt;/p&gt; &lt;p&gt; &lt;b&gt;Accountabilities&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Design and development of solutions as products that can evolve, meeting business requirements that align with modern software engineering practices and automated delivery tooling. This includes identification and implementation of the technologies and platforms.&lt;/li&gt; &lt;li&gt;Targeted design activities that apply an appropriate workload placement strategy and maximise the benefit of cloud capabilities such as elasticity, serverless, containerisation etc.&lt;/li&gt; &lt;li&gt;Best practice designs incorporating security principles (such as defence in depth and reduction of blast radius) that meet the Bank s resiliency expectations.&lt;/li&gt; &lt;li&gt;Solutions that appropriately balance risks and controls to deliver the agreed business and technology value.&lt;/li&gt; &lt;li&gt;Adoption of standardised solutions where they fit. If no standard solutions fit, feed into their ongoing evolution where appropriate.&lt;/li&gt; &lt;li&gt;Fault finding and performance issues support to operational support teams, leveraging available tooling.&lt;/li&gt; &lt;li&gt;Solution design impact assessment in terms of risk, capacity and cost impact, inc. 
estimation of project change and ongoing run costs.&lt;/li&gt; &lt;li&gt;Development of the requisite architecture inputs required to comply with the banks governance processes, including design artefacts required for architecture, privacy, security and records management governance processes.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Assistant Vice President Expectations&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;To advise and influence decision making, contribute to policy development and take responsibility for operational effectiveness. Collaborate closely with other functions/ business divisions.&lt;/li&gt; &lt;li&gt;Lead a team performing complex tasks, using well developed professional knowledge and skills to deliver on work that impacts the whole business function. Set objectives and coach employees in pursuit of those objectives, appraisal of performance relative to objectives and determination of reward outcomes&lt;/li&gt; &lt;li&gt;If the position has leadership responsibilities, People Leaders are expected to demonstrate a clear set of leadership behaviours to create an environment for colleagues to thrive and deliver to a consistently excellent standard. The four LEAD behaviours are: L Listen and be authentic, E Energise and inspire, A Align across the enterprise, D Develop others.&lt;/li&gt; &lt;li&gt;OR for an individual contributor, they will lead collaborative assignments and guide team members through structured assignments, identify the need for the inclusion of other areas of specialisation to complete assignments. 
They will identify new directions for assignments and/ or projects, identifying a combination of cross functional methodologies or practices to meet required outcomes.&lt;/li&gt; &lt;li&gt;Consult on complex issues; providing advice to People Leaders to support the resolution of escalated issues.&lt;/li&gt; &lt;li&gt;Identify ways to mitigate risk and developing new policies/procedures in support of the control and governance agenda.&lt;/li&gt; &lt;li&gt;Take ownership for managing risk and strengthening controls in relation to the work done.&lt;/li&gt; &lt;li&gt;Perform work that is closely related to that of other areas, which requires understanding of how areas coordinate and contribute to the achievement of the objectives of the organisation sub-function.&lt;/li&gt; &lt;li&gt;Collaborate with other areas of work, for business aligned support areas to keep up to speed with business activity and the business strategy.&lt;/li&gt; &lt;li&gt;Engage in complex analysis of data from multiple sources of information, internal and external sources such as procedures and practises (in other areas, teams, companies, etc).to solve problems creatively and effectively.&lt;/li&gt; &lt;li&gt;Communicate complex information. Complex information could include sensitive information or information that is difficult to communicate because of its content or its audience.&lt;/li&gt; &lt;li&gt;Influence or convince stakeholders to achieve outcomes.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;All colleagues will be expected to demonstrate the Barclays Values of Respect, Integrity, Service, Excellence and Stewardship our moral compass, helping us do what we believe is right. 
They will also be expected to demonstrate the Barclays Mindset to Empower, Challenge and Drive the operating manual for how we behave.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Barclays&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815516/data-architect-at-barclays/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815516/data-architect-at-barclays/</link>
  <title>[Full Time] Data architect at Barclays</title>
  <dc:date>2026-03-11T10:55:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815818/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Snowflake Data Warehouse&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with team members to understand project needs, developing application features, and ensuring that the solutions align with business objectives. You will also engage in testing and troubleshooting to enhance application performance and user experience, while continuously seeking opportunities for improvement and innovation in application development. Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Assist in the documentation of application processes and workflows.&lt;/li&gt;&lt;li&gt;Engage in code reviews to ensure quality and adherence to best practices. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Snowflake Data Warehouse.&lt;/li&gt;&lt;li&gt;Good To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Strong understanding of database design and management.&lt;/li&gt;&lt;li&gt;Familiarity with cloud computing concepts and services.&lt;/li&gt;&lt;li&gt;Experience in application testing and debugging methodologies. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 3 years of experience in Snowflake Data Warehouse.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815818/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815818/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>2026-03-11T10:20:57+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816090/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;C# Programming Language&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with team members to understand project needs, developing innovative solutions, and ensuring that applications are aligned with business objectives. You will engage in problem-solving activities and contribute to the overall success of the projects you are involved in, while also managing your team&apos;&apos;s performance and decisions effectively. &lt;br&gt;Roles Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and ensure timely delivery of milestones. 
Professional Technical Skills:Must To Have Skills:Proficiency in C# Programming Language.&lt;/li&gt;&lt;li&gt;Strong understanding of object-oriented programming principles.&lt;/li&gt;&lt;li&gt;Experience with .NET framework and related technologies.&lt;/li&gt;&lt;li&gt;Familiarity with database management systems and SQL.&lt;/li&gt;&lt;li&gt;Ability to write clean, maintainable, and efficient code. &lt;br&gt;Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7 years of experience in C# Programming Language.&lt;/li&gt;&lt;li&gt;This position is based out of Client Site at Chennai with minimum 3 days mandatorily at office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;br&gt;Qualification15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816090/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816090/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>2026-03-11T09:49:05+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816263/sql-dba-expert-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Database Administration Expertise: Strong knowledge of Microsoft SQL Server (installation, configuration, backup/recovery, performance tuning). &lt;/li&gt; &lt;li&gt; Cloud Migration Skills: Experience with AWS Database Migration Service (DMS), RDS for SQL Server, and hybrid migration strategies. &lt;/li&gt; &lt;li&gt; Security &amp;amp; Compliance Focus &lt;/li&gt; &lt;li&gt; Database Administration Expertise: Strong knowledge of Microsoft SQL Server (installation, configuration, backup/recovery, performance tuning). &lt;/li&gt; &lt;li&gt; Cloud Migration Skills: Experience with AWS Database Migration Service (DMS), RDS for SQL Server, and hybrid migration strategies. &lt;/li&gt; &lt;li&gt; Security &amp;amp; Compliance Focus &lt;/li&gt; &lt;li&gt; Database Administration Expertise: Strong knowledge of Microsoft SQL Server (installation, configuration, backup/recovery, performance tuning). &lt;/li&gt; &lt;li&gt; Cloud Migration Skills: Experience with AWS Database Migration Service (DMS), RDS for SQL Server, and hybrid migration strategies. &lt;/li&gt; &lt;li&gt; Security &amp;amp; Compliance Focus &lt;/li&gt; &lt;/ul&gt;Disclaimer: The job location mentioned in this description is based on publicly available information or company headquarters. 
Candidates are advised to verify the exact job location directly with the employer before applying.&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Miscellaneous&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816263/sql-dba-expert-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816263/sql-dba-expert-at-zensar/</link>
  <title>[Full Time] SQL DBA Expert at Zensar</title>
  <dc:date>2026-03-11T09:44:40+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815580/manager-digital-and-technology-finance-analytics-delivery-at-pfizer/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;b&gt; &lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;ROLE SUMMARY&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Pfizer is a renowned pharmaceutical company that improves the lives of hundreds of millions of patients worldwide. Within Pfizer, the Digital &amp;amp; Technology division is dedicated to leading the digital transformation in the pharmaceutical industry. Leveraging our expertise in technology and innovation, we support Pfizer in achieving this ambitious goal.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Join Pfizers Digital &amp;amp; Technology Core Finance Creation Center team and be a part of our mission to revolutionize the pharmaceutical industry through digital innovation. Together, we will continue to make a positive impact on the lives of countless patients around the world.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;As part of the Digital &amp;amp; Technology Core Finance Creation Center organization, our team focuses on creating, configuring, and operating digital products, platforms, and solutions that facilitate Pfizers core finance functions. 
This includes areas such as Financial Consolidations, Planning, Budgeting &amp;amp; Forecasting, Compliance, and Internal and External Reporting.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;This Manager role is a hands-on technical role responsible for developing financial reporting visualizations leveraging on-premise and cloud platforms and delivering new/next-gen features using leading BI tools including AI capabilities and industrializing the solution build.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Pfizer seeks a candidate with strong understanding of SDLC processes, documentation and their application to solving business problems. The candidate is highly self-motivated, eager to explore new technologies, and is effective when working in a team environment. A strong aptitude towards self-development and growth is highly desired.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; &lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;ROLE RESPONSIBILITIES&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Build integrated solutions on a consistent foundation architecture and manage interdependencies with other Digital solutions to create a world-class end-user experience&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Design and Implement data models to support large scale business intelligence reporting, including creation of attributes, facts, and metrics&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Leverage AI capabilities on the reporting solution for faster and sharper decision making&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Identify continuous improvement opportunities to improve system performance and end-user functionality&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; 
&lt;span&gt;Drive Operational efficiencies through continuous improvements&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;BASIC QUALIFICATIONS&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Bachelor s Degree in Computer Science, Information Technology, or similar field&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;6+ years experience working in development and/or support teams&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Strong understanding of any leading Business intelligence tools (PowerBI suite, Fabric, etc).&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Strong knowledge on Modelling the data for reporting/dashboarding&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Good knowledge on other Power platform including admin management &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Experience with GenAI capabilities. 
This includes understanding the principles of these frameworks, as well as practical experience in building applications with them&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Experience with database technologies (Aurora, Postgres, Snowflake, Oracle)&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Experience with ETL Tools (AWS Glue, Informatica, Talend, etc.)&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt;Experience with database technologies (Aurora, Postgres, Snowflake, Oracle)&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Excellent organizational and time management capabilities&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Strong interpersonal skills in building customer relationships&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ability to work independently and or in a global team setting across time zones.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;PREFERRED QUALIFICATIONS&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;In depth knowledge of Cloud technologies&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Pharmaceutical life science domain experience&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Exposure to Finance reporting applications&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Exposure to DevOps processes&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;PowerBI / AI certification&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Familiar with SDLC/Agile methodologies&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;NON-STANDARD WORK SCHEDULE, TRAVEL OR ENVIRONMENT 
REQUIREMENTS&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Job will require working with global teams and applications. Flexible working schedule will be needed on occasion to accommodate planned agile sprint planning and system releases as well as unplanned/on-call level 3 support.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Potential post-pandemic travel required for critical project work &lt;/p&gt;&lt;/li&gt; &lt;/ul&gt; &lt;br&gt;Work Location Assignment: Hybrid&lt;br&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Pfizer is an equal opportunity employer and complies with all applicable equal employment opportunity legislation in each jurisdiction in which it operates.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Information &amp;amp; Business Tech &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Manager&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pfizer&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815580/manager-digital-and-technology-finance-analytics-delivery-at-pfizer/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815580/manager-digital-and-technology-finance-analytics-delivery-at-pfizer/</link>
  <title>[Full Time] Manager Digital and Technology, Finance Analytics Delivery at Pfizer</title>
  <dc:date>2026-03-11T08:51:27+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816250/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;C# Programming Language&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with team members to understand project needs, developing innovative solutions, and ensuring that applications are aligned with business objectives. You will engage in problem-solving activities and contribute to the overall success of the projects you are involved in, while also managing your team&apos;&apos;s performance and decisions effectively. Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and ensure timely delivery of milestones. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in C# Programming Language.&lt;/li&gt;&lt;li&gt;Strong understanding of object-oriented programming principles.&lt;/li&gt;&lt;li&gt;Experience with .NET framework and related technologies.&lt;/li&gt;&lt;li&gt;Familiarity with database management systems and SQL.&lt;/li&gt;&lt;li&gt;Ability to write clean, maintainable, and efficient code. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7 years of experience in C# Programming Language.&lt;/li&gt;&lt;li&gt;This position is based out of Client Site at Chennai with minimum 3 days mandatorily at office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816250/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816250/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>2026-03-11T07:28:49+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816042/application-developer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Application Developer&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;b&gt;Must have skills :&lt;/b&gt;Data Engineering&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Amazon Web Services (AWS), Python (Programming Language)Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with various teams to understand their needs, developing solutions that align with business objectives, and ensuring that applications are optimized for performance and usability. You will also engage in problem-solving activities, providing support and enhancements to existing applications while keeping abreast of the latest technologies and methodologies in application development. &lt;br&gt;Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Mentor junior team members to enhance their skills and knowledge.- Continuously evaluate and improve application performance and user experience. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Data Engineering.- Good To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Amazon Web Services (AWS), Python (Programming Language).- Strong understanding of data modeling and database design principles.- Experience with ETL processes and data pipeline development.- Familiarity with big data technologies and frameworks. Additional Information:- The candidate should have minimum 5 years of experience in Data Engineering.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816042/application-developer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816042/application-developer-at-accenture/</link>
  <title>[Full Time] Application Developer at Accenture</title>
  <dc:date>2026-03-11T06:38:26+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816049/architect-at-cognizant/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;br&gt;&lt;/div&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;Job Summary&lt;/b&gt;&lt;/p&gt;&lt;p&gt;The Data Scientist designs develops and implements advanced analytics and generative AI models to deliver predictive and prescriptive insights from large-scale structured and unstructured data. This role partners with cross-functional teams to translate business challenges into data-driven solutions leveraging industry-standard machine learning generative AI and data visualization tools to inform confident decision-making and drive innovative product creation. The Data Scientist applies cut&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;Responsibilities&lt;/b&gt;&lt;/p&gt;&lt;p&gt;The Data Scientist designs develops and implements advanced analytics and generative AI models to deliver predictive and prescriptive insights from large-scale structured and unstructured data. This role partners with cross-functional teams to translate business challenges into data-driven solutions leveraging industry-standard machine learning generative AI and data visualization tools to inform confident decision-making and drive innovative product creation.The Data Scientist applies cutting-edge tools and technologies across on-premises and cloud environments (including GCP Vertex AI and IBM Watsonx) to design descriptive predictive and prescriptive solutions. 
This position also fosters data literacy and promotes the adoption of AI and ML capabilities across UPS.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Responsibilities&lt;/p&gt;&lt;p&gt;Define and integrate key data sources (internal UPS data and external datasets) to deliver predictive and generative AI models.&lt;/p&gt;&lt;p&gt;Develop and implement robust data pipelines for cleansing transformation and enrichment of large multi-source datasets.&lt;/p&gt;&lt;p&gt;Collaborate with data engineering teams to validate and test data pipelines and models during proof-of-concept and production phases.&lt;/p&gt;&lt;p&gt;Perform exploratory data analysis (EDA) to identify trends correlations and actionable patterns that meet business needs.&lt;/p&gt;&lt;p&gt;Design and deploy generative AI solutions integrating them into analytics and product development workflows.&lt;/p&gt;&lt;p&gt;Define and track model KPIs ensuring ongoing validation testing and retraining of models to align with business objectives.&lt;/p&gt;&lt;p&gt;Create reusable and scalable solutions through clear documentation process flows logs and clean well-commented code.&lt;/p&gt;&lt;p&gt;Communicate findings through concise reports data visualizations and storytelling to both technical and non-technical stakeholders.&lt;/p&gt;&lt;p&gt;Present operationalized insights and provide strategic recommendations to business and executive-level stakeholders.&lt;/p&gt;&lt;p&gt;Apply best practices in statistical modeling machine learning generative AI distributed computing cloud-based AI and performance optimization for production deployment.&lt;/p&gt;&lt;p&gt;Leverage emerging tools open-source frameworks and cloud technologies (including Vertex AI Databricks and IBM WatsonX) to create predictive and prescriptive analytics solutions.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Required Qualifications&lt;/p&gt;&lt;p&gt;Bachelors degree in a quantitative discipline (e.g. 
Statistics Mathematics Computer Science Engineering Operations Research or related field).&lt;/p&gt;&lt;p&gt;Masters degree preferred.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Minimum 5+ years of experience in applied data science machine learning generative AI or advanced analytics.&lt;/p&gt;&lt;p&gt;Proven experience in building and launching moderate-to-large-scale analytics and AI projects into production.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Proficiency in Python R and SQL for data preparation querying and model development.&lt;/p&gt;&lt;p&gt;Strong knowledge of supervised unsupervised and generative AI techniques such as regression classification clustering causal inference and large language models (LLMs).&lt;/p&gt;&lt;p&gt;Hands-on experience with GCP Vertex AI IBM WatsonX Databricks or SageMaker and frameworks like TensorFlow PyTorch and Keras.&lt;/p&gt;&lt;p&gt;Familiarity with data visualization tools (e.g. Tableau Power BI Shiny D3) to communicate insights effectively.&lt;/p&gt;&lt;p&gt;Experience working with Linux/Unix and Windows environments.&lt;/p&gt;&lt;p&gt;Familiarity with Java or C++ is a plus.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Strong analytical skills with attention to detail and a rigorous problem-solving approach.&lt;/p&gt;&lt;p&gt;Ability to translate complex business problems into high-level AI and analytics solutions.&lt;/p&gt;&lt;p&gt;Excellent oral and written communication skills with the ability to explain analytical and generative AI concepts to both technical and non-technical stakeholders.&lt;/p&gt;&lt;p&gt;Strong storytelling skills to communicate data-driven insights in a clear impactful way.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;Preferred Experience&lt;/p&gt;&lt;p&gt;Expertise in cloud AI technologies (GCP IBM WatsonX AWS Azure) and modern data pipelines.&lt;/p&gt;&lt;p&gt;Demonstrated success in implementing generative AI (LLMs text-to-image summarization conversational AI) for business use cases.&lt;/p&gt;&lt;p&gt;Track record 
of curiosity and innovation with the ability to explore complex datasets and generate actionable insights.&lt;/p&gt;&lt;p&gt;Background in operations research or quantitative social science is a strong plus.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cognizant&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816049/architect-at-cognizant/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816049/architect-at-cognizant/</link>
  <title>[Full Time] Architect at Cognizant</title>
   <dc:date>2026-03-11T06:04:44+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815537/duck-creek-claims-developer-at-coforge/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Required Skills &amp;amp; Experience:&lt;/p&gt; &lt;p&gt;1. Duck Creek Claims Development (MUST)&amp;nbsp;&lt;/p&gt; &lt;p&gt;Strong hands-on experience in Duck Creek Claims configuration and data structures. 2. Relational Databases (MUST)&amp;nbsp;&lt;/p&gt; &lt;p&gt;Proficient in SQL and relational database concepts; ability to write complex queries for data validation and transformation.&lt;/p&gt; &lt;p&gt;3. Experience in data migration projects, including mapping and reconciliation.&lt;/p&gt; &lt;p&gt;4. Strong analytical and problem-solving skills.&lt;/p&gt; &lt;p&gt;5. Open to learning new tools and technologies as required by the project.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Coforge&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815537/duck-creek-claims-developer-at-coforge/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815537/duck-creek-claims-developer-at-coforge/</link>
  <title>[Full Time] Duck Creek Claims Developer at Coforge</title>
   <dc:date>2026-03-11T04:18:37+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815806/application-developer-oracle-cloud-middleware-at-ibm/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt;As a Software Developer you&apos;ll participate in many aspects of the software development lifecycle, such as design, code implementation, testing, and support. You will create software that enables your clients&apos; hybrid-cloud and AI journeys. &lt;/li&gt; &lt;li&gt;&lt;b&gt;Your primary responsibilities include:&lt;/b&gt; &lt;/li&gt; &lt;li&gt;Comprehensive Feature Development and Issue Resolution: Working on the end to end feature development and solving challenges faced in the implementation. &lt;/li&gt; &lt;li&gt;Stakeholder Collaboration and Issue Resolution: Collaborate with key stakeholders, internal and external, to understand the problems, issues with the product and features and solve the issues as per SLAs defined. &lt;/li&gt; &lt;li&gt;Continuous Learning and Technology Integration: Being eager to learn new technologies and implementing the same in feature development&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt;Required education&lt;/b&gt; &lt;/div&gt; Bachelor&apos;s Degree &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt;Preferred education&lt;/b&gt; &lt;/div&gt; Master&apos;s Degree &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt;Required technical and professional expertise&lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt;Should have minimum 3 or more years of relevant experience in ODI(Oracle Database Integrator) 12c Development and Implementation.&lt;/li&gt; &lt;li&gt;Should have good knowledge of integrating with Web Services, XML(Extensible Markup Language) and other API(Application Programming Interface) to transfer the data - from source and target, in addition to database.&lt;/li&gt; &lt;li&gt;Should have hands on experience in complex data migration between heterogeneous large complex databases (Oracle database is must&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt;Preferred technical and professional experience&lt;/b&gt; &lt;/div&gt; 
&lt;ul&gt; &lt;li&gt;Exposure in risks management and resolving issues that affect release scope.&lt;/li&gt; &lt;li&gt;Ability to maintain quality and bring potential solutions to the table&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;IBM&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815806/application-developer-oracle-cloud-middleware-at-ibm/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815806/application-developer-oracle-cloud-middleware-at-ibm/</link>
  <title>[Full Time] Application Developer-Oracle Cloud Middleware at IBM</title>
   <dc:date>2026-03-11T03:47:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815836/application-developer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Application Developer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Design, build and configure applications to meet business process and application requirements. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Informatica PowerCenter&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will design, build, and configure applications to meet business process and application requirements. A typical day involves collaborating with various teams to understand their needs, developing solutions, and ensuring that applications are aligned with business objectives. You will engage in problem-solving activities and contribute to the overall success of projects by leveraging your expertise in application development. &lt;br&gt;Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME, collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and ensure timely delivery of application features. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Informatica PowerCenter.&lt;/li&gt;&lt;li&gt;Strong understanding of ETL processes and data integration techniques.&lt;/li&gt;&lt;li&gt;Experience with database management systems and SQL.&lt;/li&gt;&lt;li&gt;Familiarity with application development methodologies and best practices.&lt;/li&gt;&lt;li&gt;Ability to troubleshoot and resolve application issues efficiently. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in Informatica PowerCenter.&lt;/li&gt;&lt;li&gt;This position is based at our Mumbai office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815836/application-developer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815836/application-developer-at-accenture/</link>
  <title>[Full Time] Application Developer at Accenture</title>
   <dc:date>2026-03-11T03:04:44+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816094/etl-tester-lead-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;We are seeking an experienced ETL Tester to support data warehouse and data integration testing initiatives. The role requires expertise in ETL testing, data validation, SQL, and data warehousing concepts, along with the ability to lead testing activities, mentor team members, and collaborate with cross-functional stakeholders. The candidate will play a key role in ensuring data accuracy, integrity, and quality across complex banking data systems.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Lead and execute ETL/Data Warehouse testing for large-scale data integration projects&lt;/li&gt;&lt;li&gt;Validate data extraction, transformation, and loading processes from multiple source systems into target data warehouses or data marts&lt;/li&gt;&lt;li&gt;Develop and execute complex SQL queries for data validation, reconciliation, and quality checks&lt;/li&gt;&lt;li&gt;Analyze source-to-target mappings (STTM) and design comprehensive ETL test cases and scenarios&lt;/li&gt;&lt;li&gt;Perform data quality, data completeness, and data integrity checks&lt;/li&gt;&lt;li&gt;Coordinate with data engineers, developers, business analysts, and stakeholders to clarify requirements and resolve issues&lt;/li&gt;&lt;li&gt;Lead defect tracking, root cause analysis, and resolution management&lt;/li&gt;&lt;li&gt;Drive test planning, estimation, and test execution tracking for ETL testing cycles&lt;/li&gt;&lt;li&gt;Mentor junior testers and ensure testing best practices and standards are followed&lt;/li&gt;&lt;li&gt;Participate in test strategy development, automation opportunities, and process improvements&lt;/li&gt;&lt;li&gt;Support UAT cycles and production validation for data releases&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Technical Skills&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong experience in ETL Testing and Data Warehouse Testing Experience 
with Python or scripting for data validation Experience with AI tools (GitHub Copilot) for script acceleration Advanced SQL querying and data validation &lt;/li&gt;&lt;li&gt;Experience with ETL tools (e g, Informatica, Talend, DataStage, SSIS, or similar) Knowledge of data warehousing concepts (facts, dimensions, star/snowflake schema) Experience in data reconciliation and data quality validation Familiarity with test management tools (JIRA, ALM, TestRail, etc) &lt;/li&gt;&lt;li&gt;Exposure to Unix/Linux commands for data validation Understanding of data migration and integration testingDomain Expertise &lt;/li&gt;&lt;li&gt;Strong knowledge of Banking or Financial Services domain Exposure to areas such as:o Regulatory / Risk Reporting&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Leadership &amp;amp; Soft Skills&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience leading or mentoring ETL testing teams Strong stakeholder communication and coordination skills &lt;/li&gt;&lt;li&gt;Ability to manage multiple testing cycles and priorities &lt;/li&gt;&lt;li&gt;Strong analytical and problem-solving skills &lt;/li&gt;&lt;li&gt;Ability to drive quality assurance best practices and testing governance&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Nice to Have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience with Big Data / Cloud Data Platforms (Snowflake, AWS, Azure, GCP) Knowledge of data automation frameworks &lt;/li&gt;&lt;li&gt;Exposure to Agile / Scrum delivery models&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Education&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelors or Masters degree in Computer Science, Information Technology, or related field&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred Certifications (Optional)&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt; ISTQB Certification Banking or Data Engineering related certifications&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; 
Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816094/etl-tester-lead-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816094/etl-tester-lead-at-iris-software/</link>
  <title>[Full Time] ETL Tester - Lead at Iris Software</title>
   <dc:date>2026-03-11T03:00:16+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814955/data-architect-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Job Summary&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;We are seeking an experienced and highly skilled Data Architect with 14+ years ofstrong hands-on experience in designing scalable, high-performance dataarchitectures&lt;/li&gt;&lt;li&gt;The ideal candidate will have deep expertise in multi-tenant andsingle-tenant data models, strong RDBMS and NoSQL knowledge, and provenexperience in migrating on-premise legacy databases (such as DB2/Oracle 12c) toPostgreSQL on cloud platforms&lt;/li&gt;&lt;li&gt;This role requires both strategic architectural thinking and strong hands-on executioncapability&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Data Architecture Design Design and implement scalable, secure, and high-performance dataarchitectures&lt;/li&gt;&lt;li&gt;Define data standards, governance frameworks, and best practices&lt;/li&gt;&lt;li&gt;Optimize data storage, partitioning strategies, indexing, and queryperformance&lt;/li&gt;&lt;li&gt;Create logical and physical data models aligned with business requirements&lt;/li&gt;&lt;li&gt;Ensure data integrity, security, availability, and compliance standards&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;o PostgreSQL&lt;/p&gt;&lt;p&gt;o Oracleo SQL-based relational databaseso NoSQL databases (e.g., MongoDB, Cassandra, DynamoDB, etc.) 
Design schema, indexing strategies, stored procedures, and performancetuning mechanisms&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Lead database capacity planning and performance benchmarking activities&lt;/li&gt;&lt;li&gt;Legacy Modernization Data Migration Lead and execute data migration initiatives from legacy databases suchas DB2 to PostgreSQL on cloud platforms&lt;/li&gt;&lt;li&gt;Collaborate with engineering, DevOps, and product teams to align data&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Mandatory Requirements&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;14+ years of strong hands-on experience in designing data models formulti-tenant and single-tenant architectures&lt;/li&gt;&lt;li&gt;Strong hands-on experience with RDBMS including:&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;o PostgreSQL&lt;/p&gt;&lt;p&gt;o Oracleo SQL-based databases &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong hands-on experience with NoSQL databases&lt;/li&gt;&lt;li&gt;Preferred Qualifications Experience with data warehousing and ETL frameworks&lt;/li&gt;&lt;li&gt;Familiarity with microservices-based architecture&lt;/li&gt;&lt;li&gt;Knowledge of data security standards and compliance frameworks&lt;/li&gt;&lt;li&gt;Experience with CI/CD pipelines for database deployments&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Soft Skills&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong analytical and problem-solving skills &lt;/li&gt;&lt;li&gt;Excellent stakeholder communication &lt;/li&gt;&lt;li&gt;Ability to work in fast-paced environments Strong documentation skills &lt;/li&gt;&lt;li&gt;Leadership and mentoring capability&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full 
time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814955/data-architect-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814955/data-architect-at-virtusa/</link>
  <title>[Full Time] Data Architect at Virtusa</title>
   <dc:date>2026-03-10T16:14:51+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815062/senior-etl-talend-developer-at-datamatics/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; Experience 6+ years of relevant experience in ETL development and data integration. Qualifications Bachelor s in Technology - B.Tech/B.E (IT) or higher from reputed institute/University. Mandatory Skills Design, develop, and maintain robust ETL solutions using Talend and IBM DataStage. Job Desc &lt;strong&gt; &lt;u&gt;Job Description&lt;/u&gt; &lt;/strong&gt; Job Title Senior ETL Talend Developer Job &lt;p&gt; &lt;strong&gt; &lt;u&gt;Role Summary&lt;/u&gt; &lt;/strong&gt; &lt;/p&gt; &lt;p&gt;We are seeking an experienced Senior ETL Talend Developer to design, develop and maintain robust ETL solutions using Talend and IBM DataStage.&lt;/p&gt; &lt;p&gt; &lt;strong&gt; &lt;u&gt;Key Responsibilities&lt;/u&gt; &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Design, develop, and maintain robust ETL solutions using Talend and IBM DataStage.&lt;/li&gt; &lt;li&gt;Analyse complex data requirements and build efficient ETL pipelines to support business needs.&lt;/li&gt; &lt;li&gt;Collaborate with business stakeholders and cross-functional IT teams to understand data integration needs and propose optimal solutions.&lt;/li&gt; &lt;li&gt;Perform data extraction, transformation, and loading from multiple data sources to target data warehouses or data lakes.&lt;/li&gt; &lt;li&gt;Implement data quality checks, exception handling, and performance optimization in ETL processes.&lt;/li&gt; &lt;li&gt;Conduct unit testing, validate data accuracy, and troubleshoot technical issues in ETL jobs.&lt;/li&gt; &lt;li&gt;Create and maintain comprehensive technical documentation, including mappings, workflows, and data dictionaries.&lt;/li&gt; &lt;li&gt;Participate in code reviews, deployments, and production support as needed.&lt;/li&gt; &lt;li&gt;Ensure adherence to data governance, security, and compliance standards.&lt;/li&gt; &lt;li&gt;Stay current with industry best practices and tools in ETL and data integration 
technologies.&lt;/li&gt; &lt;li&gt;6+ years of experience in ETL development and data integration.&lt;/li&gt; &lt;li&gt;Minimum 4 years of strong hands-on experience in both Talend (preferably Talend Open Studio/Enterprise) and IBM DataStage.&lt;/li&gt; &lt;li&gt;Proficient in ETL job design, performance tuning, and handling large-scale data processing.&lt;/li&gt; &lt;li&gt;Solid understanding of RDBMS concepts, SQL queries, and experience working with databases such as Oracle, SQL Server, or PostgreSQL.&lt;/li&gt; &lt;li&gt;Strong analytical and problem-solving skills with attention to detail.&lt;/li&gt; &lt;li&gt;Ability to interpret business requirements and convert them into technical specifications.&lt;/li&gt; &lt;li&gt;Good understanding of data modelling, data warehousing concepts, and data lifecycle management.&lt;/li&gt; &lt;li&gt;Experience with version control systems (e.g., Git, SVN) and job scheduling tools is a plus.&lt;/li&gt; &lt;li&gt;Excellent communication, interpersonal, and documentation skills.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt; &lt;u&gt;Qualifications&lt;/u&gt; &lt;/strong&gt; &lt;/p&gt; &lt;p&gt; &lt;u&gt;Education:&lt;/u&gt;Education: B.Tech/B.E (IT) or higher.&lt;/p&gt; &lt;p&gt; &lt;strong&gt; &lt;u&gt;Competencies Desired&lt;/u&gt; &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; 6+ years experience in ETL.&lt;/li&gt; &lt;li&gt; Minimum 3 years of experience in Talend and Datastage development.&lt;/li&gt; &lt;li&gt; Expertise in designing and implementing Talend and Datastage ETL jobs.&lt;/li&gt; &lt;li&gt; Strong analytical and problem-solving skills.&lt;/li&gt; &lt;li&gt; Design, develop, and maintain Talend integration solutions&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data 
warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Datamatics&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815062/senior-etl-talend-developer-at-datamatics/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815062/senior-etl-talend-developer-at-datamatics/</link>
  <title>[Full Time] Senior ETL Talend Developer at Datamatics</title>
   <dc:date>2026-03-10T14:32:50+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819504/lead-software-engineer-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Job Summary:&lt;/strong&gt; We are seeking a highly skilled Senior Consultant with extensive experience in Snowflake to join our dynamic consultancy team. The ideal candidate will possess a strong background in data warehousing and analytics, with a proven track record of delivering high-quality solutions to clients. This role requires a strategic thinker who can effectively communicate with stakeholders and drive project success.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Lead the design and implementation of Snowflake-based data solutions for clients.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to gather requirements and translate them into technical specifications.&lt;/li&gt;&lt;li&gt;Provide expert guidance on best practices for data modeling, ETL processes, and performance optimization in Snowflake.&lt;/li&gt;&lt;li&gt;Conduct training sessions and workshops for clients to enhance their understanding of Snowflake capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and ensure timely delivery of milestones while maintaining high-quality standards.&lt;/li&gt;&lt;li&gt;Act as a primary point of contact for clients, addressing any concerns and providing ongoing support.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Proven expertise in Snowflake, including data loading, transformation, and querying.&lt;/li&gt;&lt;li&gt;Strong understanding of data warehousing concepts and architecture.&lt;/li&gt;&lt;li&gt;Experience with SQL and data modeling techniques.&lt;/li&gt;&lt;li&gt;Excellent problem-solving skills and attention to detail.&lt;/li&gt;&lt;li&gt;Strong communication and interpersonal skills.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred Skills:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Familiarity with 
cloud platforms such as AWS, Azure, or Google Cloud.&lt;/li&gt;&lt;li&gt;Experience with data visualization tools like Tableau or Power BI.&lt;/li&gt;&lt;li&gt;Knowledge of programming languages such as Python or Java.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Qualifications:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelor&apos;&apos;s degree in Computer Science, Information Technology, or a related field.&lt;/li&gt;&lt;li&gt;Relevant certifications in Snowflake or data analytics are a plus.&lt;/li&gt;&lt;li&gt;7-10 years of experience in data consultancy or related roles. &lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/819504/lead-software-engineer-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819504/lead-software-engineer-at-virtusa/</link>
  <title>[Full Time] Lead Software Engineer at Virtusa</title>
   <dc:date>2026-03-10T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815053/sr-script-writer-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;b&gt; &lt;span&gt; Skills - Essential &lt;/span&gt; &lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Experience on multiple full release project life cycles including Agile. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Demonstrates technical leadership skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; Expertise in working with SQL in an AWS data warehousing/data lake environment. &lt;/li&gt; &lt;li&gt; SQL Programming: Strong command over SQL programming language and its syntax, including complex queries, joins, subqueries, indexing, and optimization techniques. &lt;span&gt; (Most Essential &lt;/span&gt; ) &lt;/li&gt; &lt;li&gt; Performance Tuning and Optimization: Ability to analyse query performance, identify bottlenecks, and optimize SQL statements and database configurations for improved efficiency and response times. &lt;/li&gt; &lt;li&gt; Troubleshooting and Debugging: Proficiency in diagnosing and resolving database-related issues, such as connectivity problems, performance degradation, or data integrity concerns. &lt;/li&gt; &lt;li&gt; Experienced using AWS Glue or AWS RedShift alternate cloud technologies would be considered also. &lt;/li&gt; &lt;li&gt; Documentation and Communication: Strong written and verbal communication skills to effectively document scripts, procedures, and database changes. Ability to collaborate with other team members and stakeholders. &lt;/li&gt; &lt;li&gt; Problem-solving and Analytical Thinking: Strong problem-solving skills and the ability to think analytically when analysing complex data requirements, designing efficient queries, and resolving database-related issues. &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; &lt;span&gt; Skills Desirable &lt;/span&gt; &lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; Experience with data visualization and creating interactive dashboards in Qlik Sense. 
&lt;/li&gt; &lt;li&gt; Develop, enhance, and maintain Qlik Sense applications, reports, and dashboards to meet business requirements. &lt;/li&gt; &lt;li&gt; Knowledge of data integration techniques and experience with ETL (Extract, Transform, Load) processes. &lt;/li&gt; &lt;li&gt; Ability to work with large datasets and ensure data integrity and accuracy. &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; &lt;span&gt; Experience &lt;/span&gt; &lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Participates as a member of the development teams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Development knowledge of multiple back-end languages and technologies &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Participates in cross-functional teams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Applies technical knowledge to provide maintenance solutions across one or more technology areas. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Supports the team to develop innovative team solutions &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Participates in code reviews. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Uses knowledge and experience to help shape the application backlog. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Actively contributes to refinement sessions. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Flexible and willing to accept a change in priorities as necessary. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Attention to detail with a pragmatic and enthusiastic attitude to work. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to work effectively in a globally dispersed team and with clients and vendors &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;/p&gt; &lt;p&gt; &lt;/p&gt; &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815053/sr-script-writer-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815053/sr-script-writer-at-dxc-technology/</link>
  <title>[Full Time] Sr Script Writer at DXC Technology</title>
   <dc:date>2026-03-10T11:02:31+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814931/bi-developer-power-bi-at-exl/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;strong&gt;Designation:&lt;/strong&gt; Power BI Developer &lt;strong&gt;Experience:&lt;/strong&gt; 5 10 years&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Role &amp;amp; Responsibilities:&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Working experience in insurance reporting, including analyzing and visualizing data.&lt;/li&gt; &lt;li&gt;Connect various data sources, import and transform data for Business Intelligence purposes.&lt;/li&gt; &lt;li&gt;Build BI solutions while leveraging data analysis techniques and industry best practices.&lt;/li&gt; &lt;li&gt;Develop, publish, and schedule &lt;strong&gt;Power BI reports&lt;/strong&gt; in alignment with business requirements.&lt;/li&gt; &lt;li&gt;Design visual reports, dashboards, and KPI scorecards using &lt;strong&gt;Power BI Desktop&lt;/strong&gt; &lt;/li&gt; &lt;li&gt;Apply transformations, filters, prompts, calculated fields, sets, groups, parameters, graphs, and forecasts effectively in Tableau.&lt;/li&gt; &lt;li&gt;Utilize geo codes for map layouts and maintain expertise in action filters, LODs (Level of Detail expressions), and Tableau functions.&lt;/li&gt; &lt;li&gt;Gather, refine, and translate complex business requirements into reporting solutions.&lt;/li&gt; &lt;li&gt;Exhibit advanced statistical and analytical capabilities to derive meaningful insights.&lt;/li&gt; &lt;li&gt;Communicate findings effectively through compelling data visualizations and storytelling.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;Qualifications:&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Strong experience in &lt;strong&gt;Power BI&lt;/strong&gt; development.&lt;/li&gt; &lt;li&gt;Proficiency in &lt;strong&gt;SQL programming&lt;/strong&gt;, including advanced SQL writing skills.&lt;/li&gt; &lt;li&gt;Solid understanding of relational and dimensional data models.&lt;/li&gt; &lt;li&gt;Demonstrated ability to lead a team and provide technical direction.&lt;/li&gt; 
&lt;li&gt;Knowledge of data visualization best practices and user experience design is a plus.&lt;/li&gt; &lt;li&gt;Excellent analytical, organizational, prioritization, and oral/written communication skills.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;Good-to-Have Skills:&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Familiarity with &lt;strong&gt;Tableau&lt;/strong&gt; and &lt;strong&gt;Cognos&lt;/strong&gt; for advanced BI and reporting capabilities.&lt;/li&gt; &lt;li&gt;Experience with &lt;strong&gt;Tableau&lt;/strong&gt;, including effective spreadsheet modeling and advanced functionalities (date, string, numeric calculations, etc.).&lt;/li&gt; &lt;li&gt;Proficiency in DevOps tools and practices (e.g., Jenkins, Git) with experience in CI/CD pipelines.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;Qualifications:&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Strong experience in &lt;strong&gt;Power BI&lt;/strong&gt; development.&lt;/li&gt; &lt;li&gt;Proficiency in &lt;strong&gt;SQL programming&lt;/strong&gt;, including advanced SQL writing skills.&lt;/li&gt; &lt;li&gt;Solid understanding of relational and dimensional data models.&lt;/li&gt; &lt;li&gt;Demonstrated ability to lead a team and provide technical direction.&lt;/li&gt; &lt;li&gt;Knowledge of data visualization best practices and user experience design is a plus.&lt;/li&gt; &lt;li&gt;Excellent analytical, organizational, prioritization, and oral/written communication skills.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;Role &amp;amp; Responsibilities:&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Working experience in insurance reporting, including analyzing and visualizing data.&lt;/li&gt; &lt;li&gt;Connect various data sources, import and transform data for Business Intelligence purposes.&lt;/li&gt; &lt;li&gt;Build BI solutions while leveraging data analysis techniques and industry best practices.&lt;/li&gt; &lt;li&gt;Develop, publish, and schedule &lt;strong&gt;Power BI 
reports&lt;/strong&gt; in alignment with business requirements.&lt;/li&gt; &lt;li&gt;Design visual reports, dashboards, and KPI scorecards using &lt;strong&gt;Power BI Desktop&lt;/strong&gt; &lt;/li&gt; &lt;li&gt;Apply transformations, filters, prompts, calculated fields, sets, groups, parameters, graphs, and forecasts effectively in Tableau.&lt;/li&gt; &lt;li&gt;Utilize geo codes for map layouts and maintain expertise in action filters, LODs (Level of Detail expressions), and Tableau functions.&lt;/li&gt; &lt;li&gt;Gather, refine, and translate complex business requirements into reporting solutions.&lt;/li&gt; &lt;li&gt;Exhibit advanced statistical and analytical capabilities to derive meaningful insights.&lt;/li&gt; &lt;li&gt;Communicate findings effectively through compelling data visualizations and storytelling.&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;EXL&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814931/bi-developer-power-bi-at-exl/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814931/bi-developer-power-bi-at-exl/</link>
  <title>[Full Time] BI Developer- Power BI at EXL</title>
  <dc:date>Tue, 10 Mar 2026 08:01:42 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815036/snowflake-developer-architect-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&quot; Immediate joiners only &quot;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;Skills required to contribute:&lt;/p&gt;&lt;p&gt;5-7 Years of Data and Analytics experience with minimum 3+ years in Snowflake Cloud Data warehousing&lt;/p&gt;&lt;p&gt;1. Excellent communication and interpersonal skills.&lt;/p&gt;&lt;p&gt;2. Extensive experience in Snowflake Virtual warehouse (Compute), Data modeling &amp;amp; storage including LDM/PDM design, data loading/unloading and data sharing, SnowSQL (CLI) as well as Snowflake Internals and integrations, SnowPipe implementation, S3 Internal data copy/movement, Snowflake security including readers and consumers accounts.&lt;/p&gt;&lt;p&gt;3. Advance SQL knowledge and hands on experience on complex queries writing using with Analytical functions, Troubleshooting, problem solving and performance tuning of SQL queries accessing data warehouse as well as Strong knowledge on stored procedures&lt;/p&gt;&lt;p&gt;4. 4+ Years of experience in Informatica 10.x &amp;amp; above with all modules including ETL design &amp;amp; development and troubleshooting / debugging, Repository Manager, Strong experience in Informatica Mapping Designer (Mapping &amp;amp; Mapplets creations using all Transformations) and Workflow Designer (Session &amp;amp; Task implementation and Job scheduling)&lt;/p&gt;&lt;p&gt;5. Good experience in Requirements Analysis and Solution Architecture Design, Data modelling, ETL, data integration and data migration design&lt;/p&gt;&lt;p&gt;6. Well versed with Waterfall, Agile, Scrum and similar project delivery methodologies.&lt;/p&gt;&lt;p&gt;7. Experienced in internal as well as external stakeholder management&lt;/p&gt;&lt;p&gt;8. SnowPro-Core / SnowPro-Advanced certification will be added advantage.&lt;/p&gt;&lt;p&gt;9. 
Nice to have skills: Working experience in Reporting technologies like Tableau or Power BI and Open source stack Pyspark, Python etc.&lt;/p&gt;&lt;p&gt;10. Good to have Hi-Tech manufacturing domain experience&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815036/snowflake-developer-architect-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815036/snowflake-developer-architect-at-zensar/</link>
  <title>[Full Time] Snowflake Developer /Architect at Zensar</title>
  <dc:date>Tue, 10 Mar 2026 04:12:49 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815514/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;SAS Analytics&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and enhancing operational efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing and best practices among team members.&lt;/li&gt;&lt;li&gt;Evaluate and recommend tools and technologies to enhance data architecture. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in SAS Analytics.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud data storage solutions and architectures.&lt;/li&gt;&lt;li&gt;Ability to analyze complex data sets and derive actionable insights. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in SAS Analytics.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815514/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815514/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815531/data-architect-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Position Description:&lt;/b&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Qualification&lt;/strong&gt;: &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelors or Masters degree in Computer Science, Engineering, or a related field&lt;/li&gt;&lt;li&gt;We are seeking a skilled Data Architect with 10+ years of hands-on experience in managing enterprise databases and supporting critical application deployments&lt;/li&gt;&lt;li&gt;The role requires strong expertise in data migration, database object creation (tables, indexes, procedures), performance tuning, and deployment scripting.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Perform data migration across environments and platforms&lt;/li&gt;&lt;li&gt;Develop and maintain conceptual, logical, and physical data models to support business and technical requirements&lt;/li&gt;&lt;li&gt;Analyse existing systems and translate business needs into data structures and relationships&lt;/li&gt;&lt;li&gt;Define data standards, naming conventions, and metadata for consistency across the organization&lt;/li&gt;&lt;li&gt;Ensure models support performance, scalability, and integration requirements&lt;/li&gt;&lt;li&gt;Design and create database objects such as tables, indexes, stored procedures, and views&lt;/li&gt;&lt;li&gt;Optimize queries and perform performance tuning to ensure efficient database operations&lt;/li&gt;&lt;li&gt;Prepare and execute deployment scripts for releases and environment refreshes&lt;/li&gt;&lt;li&gt;Monitor database health, troubleshoot issues, and ensure availability and reliability&lt;/li&gt;&lt;li&gt;Collaborate with development and DevOps teams to support application deployments&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Required Skills &lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong expertise in data modeling methodologies (3NF, dimensional/star schema, snowflake, etc)&lt;/li&gt;&lt;li&gt;Proficiency 
with data modeling tools (ERwin, ER/Studio, PowerDesigner, or similar)&lt;/li&gt;&lt;li&gt;Strong SQL knowledge and understanding of RDBMS concepts&lt;/li&gt;&lt;li&gt;Proven experience in data migration, performance tuning, and query optimization&lt;/li&gt;&lt;li&gt;Strong skills in SQL scripting and automation&lt;/li&gt;&lt;li&gt;Hands-on knowledge of backup/recovery tools and deployment processes&lt;/li&gt;&lt;li&gt;Good understanding of indexes, partitions, and database design principles&lt;/li&gt;&lt;li&gt;Excellent communication and problem-solving skills&lt;/li&gt;&lt;li&gt;Nice to Have Experience with CI/CD pipelines and DevOps-based database deployments&lt;/li&gt;&lt;li&gt;Exposure to cloud databases (AWS RDS, Azure SQL, GCP) is a plus.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Application Development&lt;/li&gt;&lt;li&gt;Applications Administration&lt;/li&gt;&lt;li&gt;Database&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815531/data-architect-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815531/data-architect-at-cgi/</link>
  <title>[Full Time] Data Architect at CGI</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/819171/app-dev-support-engineer-ii-at-conduent/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Design, develop, and maintain ETL processes using Oracle Data Integrator (ODI 12c):&lt;/li&gt;&lt;li&gt;Write and optimize PL/SQL procedures, functions, packages, and triggers &lt;/li&gt;&lt;li&gt;Perform data extraction, transformation, and loading (ETL) from multiple sources &lt;/li&gt;&lt;li&gt;Collaborate with data architects and business analysts to understand requirements &lt;/li&gt;&lt;li&gt;Collaborate with business analysts, testers, and other developers to gather requirements and resolve issues &lt;/li&gt;&lt;li&gt;Monitor daily ETL jobs, troubleshoot failures, and provide production support &lt;/li&gt;&lt;li&gt;Perform performance tuning and error handling &lt;/li&gt;&lt;li&gt;Participate in code reviews, testing, and deployment support &lt;/li&gt;&lt;li&gt;Prepare technical documentation and maintain best practices Professional &amp;amp; Technical Skills&lt;/li&gt;&lt;li&gt;Strong hands-on experience with ODI 12c (Oracle Data Integrator) &lt;/li&gt;&lt;li&gt;Expertise in Oracle SQL and PL/SQL &lt;/li&gt;&lt;li&gt;Experience with data warehousing concepts, data modeling, and relational databases &lt;/li&gt;&lt;li&gt;Strong analytical and problem-solving skills Additional Information&lt;/li&gt;&lt;li&gt;The candidate should have minimum 3 years of experience in Oracle Data Integrator (ODI).&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;BPM / BPO&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Conduent&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/819171/app-dev-support-engineer-ii-at-conduent/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/819171/app-dev-support-engineer-ii-at-conduent/</link>
  <title>[Full Time] App Dev &amp; Support Engineer II at Conduent</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815524/data-architect-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Design, develop, and optimize complex data pipelines and transformation processes using Snowflake, dbt, and AWS services. &lt;/li&gt;&lt;li&gt;Implement and manage data integration workflows using Fivetran to ensure timely and accurate data ingestion from various sources. &lt;/li&gt;&lt;li&gt;Develop and maintain scalable data models and schemas in Snowflake, ensuring they meet performance and business requirements. &lt;/li&gt;&lt;li&gt;Monitor and fine-tune the performance of data pipelines, queries, and data models to ensure optimal efficiency and cost-effectiveness. &lt;/li&gt;&lt;li&gt;Utilize Snowflakes features, such as Time Travel, Zero-Copy Cloning, and Data Sharing, to enhance data management and performance. &lt;/li&gt;&lt;li&gt;Leverage AWS services, such as AWS Lambda, S3, and Glue, to build and manage serverless data processing workflows and data storage solutions. &lt;/li&gt;&lt;li&gt;Implement data security measures and ensure compliance with data privacy regulations and organizational policies. &lt;/li&gt;&lt;li&gt;Troubleshoot and resolve complex data issues, including data sync errors, performance bottlenecks, and integration challenges. Provide support for data-related incidents and ensure effective resolution of production issues. &lt;/li&gt;&lt;li&gt;Collaborate with data analysts, and other stakeholders to understand data needs and deliver effective solutions. &lt;/li&gt;&lt;li&gt;Document data processes, models, and workflows, ensuring clear communication and knowledge sharing across teams. 
Independently assess situations, apply sound judgment and discretion, and make decisions on matters of significant impact without direct supervision&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;&lt;b&gt;Mandatory Competencies&lt;/b&gt;&lt;/b&gt;&lt;br&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;Cloud - AWS - AWS S3, S3 glacier, AWS EBS&lt;/div&gt;&lt;div&gt;Beh - Communication&lt;/div&gt;&lt;div&gt;Cloud - AWS - AWS Lambda,AWS EventBridge, AWS Fargate&lt;/div&gt;&lt;div&gt;Cloud - AWS - AWS CodeBuild, AWS CodeDeploy, AWS CodePipeline&lt;/div&gt;&lt;div&gt;Database - PostgreSQL - PostgreSQL&lt;/div&gt;&lt;div&gt;Cloud - Cloud - Snowflake&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815524/data-architect-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815524/data-architect-at-iris-software/</link>
  <title>[Full Time] Data Architect at Iris Software</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815525/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt; &lt;b&gt;About The Role &lt;/b&gt; &lt;b&gt; &lt;br&gt;Project Role :&lt;/b&gt;Data Architect &lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Reltio &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;br&gt; &lt;b&gt;Summary&lt;/b&gt;:As a Data Architect/MDM Architect is a senior technology leader responsible for designing, building, and overseeing the enterprise&apos;s Master Data Management solution. This role is pivotal in establishing a &quot;single source of truth&quot; for critical data domains such as Customer, Product, Vendor, and Employee.You will be the chief technical authority on MDM, defining the end-to-end architecture, data models, governance processes, and integration patterns. You will collaborate closely with business stakeholders, data stewards, and IT teams to ensure the MDM solution aligns with strategic business objectives and drives data quality, consistency, and accessibility across the organization. 
Key Responsibilities Strategy &amp;amp; Architecture:Develop and own the enterprise MDM strategy, solution architecture, and implementation roadmap.Design scalable, high-performance MDM solutions (on-premise, cloud, or hybrid) that encompass data modeling, data quality, data governance, and data integration.Define the canonical data models, match/merge rules, survivorship logic, and data stewardship workflows for all master data domains.Evaluate and help DEV team implement Reltio MDM - and best practices.Implementation &amp;amp; Integration:Lead the technical design and implementation of the Reltio MDM platform.Architect and design real-time (API) and batch integration patterns to synchronize master data between the MDM hub and critical enterprise systems (e.g., ERP, CRM, data warehouse).Oversee the configuration of the Reltio MDM solution to meet business requirements.Data Governance &amp;amp; Quality:Collaborate with the Data Governance Office and business data stewards to define and enforce data quality rules, standards, and policies within the MDM solution.Design data quality dashboards and reports to monitor the health of master data.Establish and manage the technical processes for data stewardship, including issue resolution and change management.Leadership &amp;amp; Collaboration:Act as the primary technical liaison between business stakeholders, data governance teams, and development teams.Translate complex business requirements into scalable technical specifications.Mentor and provide technical guidance to ETL Integrations data engineers, developers, and Reltio MDM analysts.Champion the value of master data management and data governance across the organization. 
Skills and &lt;br&gt;Qualifications Required &lt;br&gt;Qualifications Experience:8-12+ years of experience in data management, data architecture, or enterprise architecture.5+ years of hands-on experience designing and implementing large-scale MDM solutions.Technical &lt;br&gt; &lt;b&gt; &lt;br&gt;&lt;/b&gt; &lt;b&gt;Skills:&lt;/b&gt; &lt;li&gt;Expert-level knowledge of at least one major MDM Reltio.Deep understanding of MDM concepts:data modeling, matching, merging, hierarchy management, data quality, and stewardship.Strong experience with data integration technologies and patterns (ETL/ELT, APIs, message queues, web services).Proficiency in SQL and data modeling (conceptual, logical, physical).Experience with one or more major cloud platforms (AWS, Azure, or GCP) and their data services.Soft &lt;br&gt; &lt;b&gt; &lt;br&gt;&lt;/b&gt; &lt;b&gt;Skills:&lt;/b&gt; &lt;/li&gt; &lt;li&gt;Excellent communication and stakeholder management:Ability to explain complex technical concepts to non-technical audiences.Strong leadership and mentoring skills.Strategic thinker with strong analytical and problem-solving abilities Preferred (Nice-to-Have) &lt;br&gt;Qualifications Experience with multiple MDM platforms.Hands-on experience with data engineering tools (e.g., Python, Spark, Databricks).Knowledge of data governance frameworks. Familiarity with data catalog and metadata management tools (e.g., Collibra, Alation).Professional certifications (e.g., TOGAF, DAMA, cloud provider certifications, or tool-specific MDM certifications).Experience in a specific industry (e.g., Finance, Healthcare, Retail) and its related data domains (e.g., Patient, Financial Instrument).Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Reltio.&lt;/li&gt; &lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt; &lt;li&gt;A 15 years full time education is required. 
&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815525/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815525/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/817764/process-mining-platform-engineer-at-pepsico/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt; Overview &lt;/strong&gt; &lt;p&gt; &lt;span&gt; As a process mining platform engineer you will be responsible for catering to devsecops requirements from sector teams on the platform. Resolving platform incidents and fulfilling the service requests. This role will also involve platform engineering activities on ETL, administration and enabling of new capabilities on the platform. &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt; &lt;br&gt; &lt;strong&gt; Responsibilities &lt;/strong&gt; &lt;p&gt; &lt;span&gt; Experienced in data extraction, data modelling and dashboarding of process mining models on Celonis.,Experienced with agile development methodology (i.e. Safe Agile), Coordinate with process owners and business to understand the as-is process and the requirements for process mining .,Reports status, issues and risks to tech leads on a regular basis,Understand existing processes and facilitate change requirements as part of a change control process,Responsible for the requirement understanding, development, testing, implementation and maintenance of new and existing process mining models &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt; &lt;br&gt; &lt;strong&gt; Qualifications &lt;/strong&gt; &lt;p&gt; &lt;span&gt; Engineering Degree or PG &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Beverage&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pepsico&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/817764/process-mining-platform-engineer-at-pepsico/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/817764/process-mining-platform-engineer-at-pepsico/</link>
  <title>[Full Time] Process Mining Platform Engineer at Pepsico</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815970/systems-and-infrastructure-engineer-iii-at-walmart/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; Position Summary... &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; What youll do... 
&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;b&gt; About Team &lt;/b&gt; &lt;/p&gt; &lt;p&gt; Walmart s Enterprise Business Services (EBS) is a powerhouse of several exceptional teams delivering world-class technology solutions and services making a profound impact at every level of Walmart. &lt;/p&gt; &lt;p&gt; As a key part of Walmart Global Tech, our teams set the bar for operational excellence and leverage emerging technology to support millions of customers, associates, and stakeholders worldwide. Each time an associate turns on their laptop, a customer makes a purchase, a new supplier is onboarded, the company closes the books, physical and legal risk is avoided, and when we pay our associates consistently and accurately, that is EBS. Joining EBS means embarking on a journey of limitless growth, relentless innovation, and the chance to set new industry standards that shape the future of Walmart. &lt;/p&gt; &lt;p&gt; &lt;b&gt; What you will do &lt;/b&gt; &lt;/p&gt; &lt;p&gt; The Enterprise Integrations Support team provides 24/7 support and operational management for 2000+ integrations for critical systems such as SAP, Manufacturing, HR, Finance, Real Estate, and many others. We work on multiple Enterprise Integration technologies like SAP BODS, IBM Integration Bus, IBM MQ, MFT, ITX, SAP PI/PO, SAP CPI. 
&lt;/p&gt; &lt;p&gt; We work on all incidents, change management, problem management across the 2000+ integration applications across multiple domains like Finance, Manufacturing, Retail, People etc across multiple regions. Team work on service improvements to improve stability of Integration applications and minimise manual efforts &lt;/p&gt; &lt;p&gt; As a Systems and Infrastructure Engineer for Walmart, you ll have the opportunity to &lt;/p&gt; &lt;ul&gt; &lt;li&gt; Manage 2000+ Integration applications in Production environment. &lt;/li&gt; &lt;li&gt; The Enterprise Integrations Support team provides 24/7 support and operational management for 2000+ integrations for critical systems such as SAP, Manufacturing, HR, Finance, Real Estate, and many others. &lt;/li&gt; &lt;li&gt; Responsible for renewal of application specific certificates &lt;/li&gt; &lt;li&gt; Each person on the support team is critical as they cover a specific shift to provide the needed support coverage. To ensure we identify and recover as quickly as possible. &lt;/li&gt; &lt;li&gt; You will work in Integrations related tech stacks using several technologies including but not limited to IBM Integrations Bus, IBM MQ, MFT, ITX, SAP CPI, SAP PI/PO, SAP BODS, REST APIs, etc. &lt;/li&gt; &lt;li&gt; Help define Support Roadmap and own the entire support delivery end to end &lt;/li&gt; &lt;li&gt; Work very closely with different product and business stakeholders at various locations in US and India to drive the execution of multiple business plans and technologies &lt;/li&gt; &lt;li&gt; Support business objectives by collaborating with business partners to define priorities, identify opportunities and drive resolutions &lt;/li&gt; &lt;li&gt; Independently own and address and resolve applications issues, provide updates and drive root cause analysis including identifying issue patterns and working with Project Services/Development teams to address them. 
&lt;/li&gt; &lt;li&gt; Responsible for coming up with processes to maintain incident support metrics like backlog, MTTR, INC SLAs etc &lt;/li&gt; &lt;li&gt; Plan, execute and implement applications and configuration change procedures &lt;/li&gt; &lt;li&gt; Responsible for on-call manager support as per pre-determined shift roster which includes weekend support. &lt;/li&gt; &lt;li&gt; Conceptualize the idea for process improvements and/or automation frameworks that will make for more effective Support engagement &lt;/li&gt; &lt;li&gt; Exhibit strong leadership and communication skills to collaborate with product, engineering and management teams across different geographic locations &lt;/li&gt; &lt;li&gt; Gain &amp;amp;maintain expertise in flow of application process and systems related to domain &lt;/li&gt; &lt;li&gt; Flexibility - responsibilities will require rotational shifts across 24*7 including night shifts and weekend work. All team members will be expected to work around 6-8 days of night shifts every month. &lt;/li&gt; &lt;li&gt; Optimize IT resource utilization &lt;/li&gt; &lt;li&gt; Analyze and come up with curated solutions to system integration problems. &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; What you will bring &lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; An Engineering Degree - B.E/B.Tech/MS in any stream Computer Science preferred. &lt;/li&gt; &lt;li&gt; 3-6 years of relevant experience, Minimum of 3 years of experience with Integrations technologies. Experience in leading Scrum teams. &lt;/li&gt; &lt;li&gt; Overall 2+ years of relevant experience in Enterprise Integrations production support. &lt;/li&gt; &lt;li&gt; Good analytical skills to analyse the Integrations application issues and fix issues independently. &lt;/li&gt; &lt;li&gt; Should have supported middleware interfaces &lt;/li&gt; &lt;li&gt; Very good debugging skills in SAP BODS. &lt;/li&gt; &lt;li&gt; Good to have exposure to IBM Integration Bus, SAP PI/PO, IBM MQ, SAP CPI. 
&lt;/li&gt; &lt;li&gt; Hands-on experience in any cloud native technologies like Azure, GCP &lt;br&gt; Exposure to Automation tools and technologies, Scripting languages. &lt;/li&gt; &lt;li&gt; Very strong understanding and experience in the area of software development lifecycle. &lt;/li&gt; &lt;li&gt; Strong Java/J2EE, Spring skills. &lt;/li&gt; &lt;li&gt; Working knowledge in a Cloud based service. &lt;/li&gt; &lt;li&gt; Ability to code/Script in one of the languages (C#, Python and Java) &lt;/li&gt; &lt;li&gt; Mentoring team on technology and process for automation need &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;b&gt; Core Skills: &lt;/b&gt; SAP BODS &lt;/li&gt; &lt;li&gt; &lt;b&gt; Good to have: &lt;/b&gt; IBM Integration Bus/IBM MQ, SAP PI/PO, SAP CPI, MFT, IBM ITX. &lt;br&gt; Messaging and communication: IBM MQ/Active MQ- Basic debugging knowledge &lt;br&gt; App monitoring tools: Service Now, Strong debugging knowledge &lt;br&gt; Cloud Technologies: MS Azure, GCP and Open Stack - Basic debugging knowledge &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; About Walmart Global Tech &lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;br&gt; . &lt;/p&gt; &lt;p&gt; . &lt;/p&gt; &lt;p&gt; Walmart s culture sets us apart, and we know being together helps us innovate, learn and grow great careers. This role is based in our Bangalore office for daily work, with the flexibility for associates to manage their personal lives. &lt;/p&gt; &lt;p&gt; &lt;b&gt; Benefits &lt;/b&gt; &lt;/p&gt; &lt;p&gt; . &lt;/p&gt; &lt;p&gt; &lt;b&gt; Belonging &lt;/b&gt; &lt;/p&gt; &lt;p&gt; . &lt;/p&gt; &lt;p&gt; . &lt;/p&gt; &lt;p&gt; &lt;b&gt; Equal Opportunity Employer &lt;/b&gt; &lt;/p&gt; &lt;p&gt; Walmart, Inc., is an Equal Opportunities Employer By Choice. We believe we are best equipped to help our associates, customers and the communities we serve live better when we really know them. 
That means understanding, respecting and valuing unique styles, experiences, identities, ideas and opinions while being inclusive of all people. &lt;/p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; Minimum Qualifications... &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; Option 1: Bachelors degree in computer science, computer engineering, information systems, information technology, or related area and 2 years experience in technology infrastructure engineering across areas such as compute, storage, network, mobility or virtualization-related technologies. &lt;br&gt; Option 2: 3 years experience in technology infrastructure engineering across areas such as compute, storage, network, mobility or virtualization relatedtechnologies. 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; Preferred Qualifications... &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; Master s degree in computer science, computer engineering, information systems, information technology, or related area &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Walmart&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/815970/systems-and-infrastructure-engineer-iii-at-walmart/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815970/systems-and-infrastructure-engineer-iii-at-walmart/</link>
  <title>[Full Time] Systems and Infrastructure Engineer III at Walmart</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815552/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Cloud Data Architecture&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;Google BigQuery&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure the data architecture supports the overall goals of the organization, while also addressing any challenges that arise in the data management process. Your role will be pivotal in establishing a robust data framework that enhances data accessibility and usability across the organization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Cloud Data Architecture.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Google BigQuery.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with cloud platforms and services related to data storage and processing.&lt;/li&gt;&lt;li&gt;Familiarity with data integration tools and methodologies. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Cloud Data Architecture.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815552/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815552/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815548/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Microsoft Fabric, Microsoft Azure Databricks, Microsoft SQL Server&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will play a crucial role in defining the data requirements and structure for applications. Your typical day will involve modeling and designing application data structures, ensuring efficient storage and integration. You will collaborate with various teams to understand their data needs and translate them into effective architectural solutions, while also addressing any challenges that arise in the data management process. Your expertise will guide the development of robust data frameworks that support the organization&apos;&apos;s objectives and enhance data accessibility and usability. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Engage in continuous learning to stay updated with industry trends and technologies.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to ensure alignment of data architecture with business goals. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;b&gt;&lt;br&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt; Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience in designing and implementing data integration solutions.&lt;/li&gt;&lt;li&gt;Familiarity with data governance principles and practices. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 3 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815548/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815548/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815543/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;Python (Programming Language), Data Engineering, Microsoft Power Business Intelligence (BI)&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. A typical day involves collaborating with various teams to model and design the application data structure, ensuring optimal storage and integration solutions are in place. You will engage in discussions to align data strategies with business objectives, while also addressing any challenges that arise in the data architecture process. Your role will require a keen understanding of data flows and the ability to translate complex requirements into actionable designs, fostering a collaborative environment that encourages innovation and efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Data Engineering, Python (Programming Language), Microsoft Power Business Intelligence (BI).&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815543/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815543/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 09 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814516/mainframe-db2-dba-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;TCS has been a great pioneer in feeding the fire of young techies like you. We are a global leader in the technology arena and theres nothing that can stop us from growing together.&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;What we are looking for&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Role: Mainframe DB2 DBA&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience Range: 7 15 Years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location : Bangalore/Mumbai&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Must Have:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Experience in Mainframe Db2 v12/ v13 Database System-Administration on z/OS&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;DB2 installation, upgrade, patching, configuration and operations&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Proficiency in DB2, VSAM Systems Performance Tuning and Capacity Planning&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience in Data Sharing setup in Parallel Sysplex environment&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Knowledge in ACF2/RACF, CICS v5.5, Mainframe MQ v9&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience in SMPE and ISV tools installation and migration&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Good to Have:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Database DR, Cloning, Backup/Recovery, HA and Point In Time Recovery (PITR) expertise&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience with Database ISV Tools and Products like OmegaMon and AdminTool&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Application related performance tuning; Write, modify, and debug database specific SQL queries&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;DBA related utilities like Unload, Load, DSNJU003, DSNJU004,Reorg, Runstats, image copy&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;REXX, CLIST, JCL, JES and 
Assembler knowledge&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Essential:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Define and provide guidance for database creation, configuration, upgrade, patches and refresh requirements&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Recommend operating system and database performance monitoring, tuning and configuration changes&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience with database physical and logical design, query analysis and optimization&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Provide technical assistance and subject matter expertise to applications groups. Provide database storage management and capacity management recommendations&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Minimum Qualification:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;15 years of full-time education&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Minimum percentile of 50% in 10th, 12th, UG &amp;amp; PG (if applicable)&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814516/mainframe-db2-dba-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814516/mainframe-db2-dba-at-tata-consultancy/</link>
  <title>[Full Time] Mainframe DB2 DBA at Tata Consultancy</title>
  <dc:date>Sun, 08 Mar 2026 00:59:33 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814728/oracle-cerner-ehr-systems-engineerambulatoryambulatory-scheduling-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;Project description &lt;/b&gt;&lt;div&gt;&lt;div&gt;Support clients in the operation, maintenance, and optimization of Oracle Cerner EHR environments. This role is designed for early-career professionals who are eager to grow their technical skills in healthcare IT while working under the mentorship of experienced consultants and technical leaders. You will gain hands-on exposure to Cerner infrastructure, system workflows, and healthcare technology best practices while contributing to meaningful client outcomes. &lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;Support day-to-day operational and project-based activities for Oracle Cerner clinical and revenue cycle applications (e.g., PowerChart, FirstNet, SurgiNet, PathNet, Millennium applications) under the guidance of senior consultants &lt;div&gt;&lt;li&gt;Assist with application build, configuration, and maintenance, including orderables, documentation templates, clinical workflows, and rules &lt;/li&gt;&lt;li&gt;Participate in system analysis to understand clinical and operational workflows and translate requirements into Cerner application solutions &lt;/li&gt;&lt;li&gt;Support application testing activities, including unit testing, integrated testing, and regression testing, following established test scripts and validation procedures &lt;/li&gt;&lt;li&gt;Assist in issue investigation and troubleshooting, including workflow issues, application defects, and end-user reported concerns; escalate appropriately when needed &lt;/li&gt;&lt;li&gt;Support go-live, optimization, and post-go-live stabilization activities, including issue tracking and resolution support &lt;/li&gt;&lt;li&gt;Develop foundational knowledge of Cerner Millennium architecture, application domains, and data flows 
&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams including technical, reporting, clinical, and operational stakeholders &lt;/li&gt;&lt;li&gt;Interface with Oracle Cerner representatives during application troubleshooting, upgrades, or enhancement activities as directed &lt;/li&gt;&lt;li&gt;Create and maintain application documentation, including build specifications, workflow diagrams, and status reports &lt;/li&gt;&lt;li&gt;Participate in regular team and client meetings to communicate application status, risks, and upcoming milestones &lt;/li&gt;&lt;li&gt;Ensure compliance with data security, regulatory requirements, and protected health information (PHI) standards &lt;/li&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Skills &lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Must have &lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Module: &lt;/strong&gt;Ambulatory + Ambulatory Scheduling &lt;div&gt;&lt;li&gt;&lt;strong&gt;Skillset:&lt;/strong&gt; Outpatient clinical workflows &lt;/li&gt;&lt;li&gt;=============================================== &lt;/li&gt;&lt;li&gt;Strong curiosity and desire to learn healthcare technology and EHR systems &lt;/li&gt;&lt;li&gt;Self-motivated with the ability to manage time, tasks, and documentation effectively &lt;/li&gt;&lt;li&gt;Ability to prioritize work, meet deadlines, and deliver high-quality results &lt;/li&gt;&lt;li&gt;Collaborative, consultative mindset with the ability to work across technical and non-technical teams &lt;/li&gt;&lt;li&gt;Attention to detail and commitment to accuracy in system build and documentation &lt;/li&gt;&lt;li&gt;Professional integrity and accountability when working with sensitive and regulated data &lt;/li&gt;&lt;li&gt;Healthcare IT exp is a MUST 
&lt;/li&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;Certifications, provider templatesAmbulatory quality metrics &lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814728/oracle-cerner-ehr-systems-engineerambulatoryambulatory-scheduling-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814728/oracle-cerner-ehr-systems-engineerambulatoryambulatory-scheduling-at-luxoft/</link>
  <title>[Full Time] Oracle Cerner EHR Systems Engineer (Ambulatory + Ambulatory Scheduling) at Luxoft</title>
  <dc:date>Fri, 06 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814679/etl-developer-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt; &lt;span&gt; Skills - Essential &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Data Transformation and ETL AWS Glue &lt;/li&gt; &lt;li&gt; Development experience in the Amazon Cloud Environment AWS (S3, AWS Glue, Amazon Redshift, Data Lake) &lt;/li&gt; &lt;li&gt; Ability to perform data manipulations, load, extract from several sources of data into another schema &lt;/li&gt; &lt;li&gt; Understanding of core AWS services, and basic AWS architecture best practices. &lt;/li&gt; &lt;li&gt; Create ETL jobs using Python/PySpark to fulfil the requirements &lt;/li&gt; &lt;li&gt; Proficiency in developing, deploying, and debugging cloud-based data assets. &lt;/li&gt; &lt;li&gt; Ability to understand requirements and changes to requirements &lt;/li&gt; &lt;li&gt; Knowledge and experience of modem software design and development methodologies &amp;amp; frameworks (e.g. Agile/Scrum) &lt;/li&gt; &lt;li&gt; Should be self-motivated and be able to work in a team and drive the team to success &lt;/li&gt; &lt;li&gt; Flexible to learn and adapt to new technologies and skills needed in the project &lt;/li&gt; &lt;li&gt; Strong communication skills - in onsite / offshore engagement models &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; Skills - Desirable &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Work hands-on in QlikView / Qlik Sense development &lt;/li&gt; &lt;li&gt; Experience of Qlik Compose &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;br&gt; &lt;b&gt; Basic Qualifications: &lt;/b&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; Bachelors degree in a relevant field (i.e., Computer Science) or equivalent combination of education and experience &lt;br&gt; Typically, 6+ years of relevant work experience in industry, with a minimum of 2+ years in a similar role years of experience in software engineering &lt;br&gt; Proficiency in 1 or more software languages and 
development methodologies &lt;br&gt; Strong programming and debugging skills &lt;br&gt; Some prior experience in a leadership or mentoring capacity &lt;br&gt; Familiarity with software development methodologies and best practices &lt;br&gt; Good communication and teamwork skills &lt;br&gt; Willingness to learn and grow in a leadership role &lt;br&gt; Strong organizational and time management skills &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814679/etl-developer-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814679/etl-developer-at-dxc-technology/</link>
  <title>[Full Time] ETL Developer at DXC Technology</title>
  <dc:date>Fri, 06 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814648/data-architect-at-addrec-solutions/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Job Description &lt;p&gt; &lt;strong&gt;Job Description:&lt;/strong&gt; &lt;/p&gt; &lt;p&gt;8 + Years of exp in the Design and implementation of Data Integrations using MS Azure Cloud services&lt;/p&gt; &lt;p&gt;Experience in Azure Data Lake including ADF/Databricks.&lt;/p&gt; &lt;p&gt;4+ experience in building data applications on Azure cloud using Azure Data factory/Synapse/Data bricks etc.&lt;/p&gt; &lt;p&gt;Experience in CI/CD implementation for Azure Data Platform Services like ADF, ADB, Synapsis Analytics, Azure SQL DB etc.&lt;/p&gt; &lt;p&gt;Experience with NoSQL datastores like Cassandra, Elasticsearch, HBase, MongoDB.&lt;/p&gt; &lt;p&gt;Proven skills in designing, tuning &amp;amp; optimizing scalable, highly available distributed systems which can handle high data volumes.&lt;/p&gt; &lt;p&gt;Strong understanding of software engineering principles and fundamentals including data structures and algorithms.&lt;/p&gt; &lt;p&gt;Strong experience of working with APIs and integrating multiple applications together.&lt;/p&gt; &lt;p&gt;Extensive experience with version control systems like TFS / Git and their use in release management, branching, merging, and integration strategies.&lt;/p&gt; &lt;p&gt;Hands-on experience in using build servers like Azure Build and Release Pipelines or Jenkins.&lt;/p&gt; &lt;p&gt;Practical experience of using version control systems (Azure Repo preferred).&lt;/p&gt; &lt;p&gt;Experience in monitoring, debugging, maintaining, and optimizing pipelines in Azure&lt;/p&gt; &lt;p&gt;Experience in automation of pipeline alerts, failure recovery/restart, logging, and reporting KPIs of pipelines and Triggers&lt;/p&gt; &lt;p&gt;Experience in scheduled triggers, link services, pipeline validations, 3rd party system/service integrations/connectivity etc.&lt;/p&gt; &lt;p&gt;Experience in identifying and resolving recurring issues, envisioning, and implementing service improvements etc.&lt;/p&gt; 
&lt;p&gt;Excellent Communication skills and experience in client facing roles&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Good to have skills&lt;/strong&gt;: Knowledge on Azure Purview, Logic Apps, Functional Apps etc. Visualization tools PowerBI and Tableau&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Roles &amp;amp; Responsibilities:&lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Work with DE Practice Lead and clients to understand business problems, industry context, data sources, potential risks, and constraints.&lt;/p&gt; &lt;p&gt;Collaborate with Leadership provide meaningful and credible feedback on Data Engineering capabilities, data availability, and customer trend information.&lt;/p&gt; &lt;p&gt;Actively mentor and coach the team and help them realize the best solution to a problem.&lt;/p&gt; &lt;p&gt;Facilitating, guiding, and influencing the clients and teams towards right information technology architecture and becoming interface between Business leadership, Tech leadership and the delivery teams&lt;/p&gt; &lt;p&gt;Provide best practice advice to customers and team members.&lt;/p&gt; &lt;p&gt;Create an ecosystem that fosters innovation and encourages members of the CoE to build innovative solutions and publish papers/content in public domain.&lt;/p&gt; &lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Recruitment / Staffing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Addrec Solutions&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814648/data-architect-at-addrec-solutions/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814648/data-architect-at-addrec-solutions/</link>
  <title>[Full Time] Data Architect at Addrec Solutions</title>
  <dc:date>Fri, 06 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814448/data-architect-data-at-happiest-minds/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt;&lt;br&gt;&lt;li&gt;Design and implement scalable, secure, and efficient data architectures.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Develop and maintain large-scale data systems and databases.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Collaborate with cross-functional teams to identify business requirements and develop solutions.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Ensure compliance with industry standards and best practices.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Provide technical leadership and guidance to junior team members.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Participate in code reviews and ensure high-quality deliverables.&lt;/li&gt;&lt;br&gt;&lt;/ul&gt;&lt;br&gt;&lt;h3&gt;Job Requirements&lt;/h3&gt;&lt;br&gt;&lt;ul&gt;&lt;br&gt;&lt;li&gt;Strong knowledge of data modeling, database design, and data warehousing concepts.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Experience with big data technologies such as Hadoop, Spark, and NoSQL databases.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Excellent problem-solving skills and attention to detail.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Strong communication and collaboration skills.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Ability to work in a fast-paced environment and adapt to changing priorities.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Bachelor&apos;s degree in Computer Science or related field.&lt;/li&gt;&lt;br&gt;&lt;/ul&gt;Disclaimer: This job description has been sourced from a public domain and may have been modified by Naukri.com to improve clarity for our users. 
We encourage job seekers to verify all details directly with the employer via their official channels before applying.&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Happiest Minds&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814448/data-architect-data-at-happiest-minds/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814448/data-architect-data-at-happiest-minds/</link>
  <title>[Full Time] Data Architect Data at Happiest Minds</title>
  <dc:date>Fri, 06 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814692/guidewire-datahub-infocenter-developer-dhic-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;DHIC Developer JD &lt;/p&gt; &lt;p&gt;Must have skills &lt;/p&gt; &lt;ul&gt; &lt;li&gt;6-9 years of Experience in P&amp;amp;C Insurance on Guidewire DataHub/InfoCenter Platform.&lt;/li&gt; &lt;li&gt;Specialist Certifications on DHIC.&lt;/li&gt; &lt;li&gt;Experience on SQL Server databases.&lt;/li&gt; &lt;li&gt;Able to create PL/SQL stored procedures. &lt;/li&gt; &lt;li&gt;Experience on Guidewire ClaimCenter/ PolicyCenter/ BillingCenter data models. &lt;/li&gt; &lt;li&gt;Experience on SAP BODS ETL design &amp;amp; Administration.&lt;/li&gt; &lt;li&gt;Experience on Data Warehousing that includes analysis and development of Dataflows, mappings using needed transformations using BODS.&lt;/li&gt; &lt;li&gt;Experience in mapping Guidewire Insurance Suite of products (PC/BC/CC/CM) to DHIC.&lt;/li&gt; &lt;li&gt;Works with business in identifying detailed analytical and operational reporting/extracts requirements.&lt;/li&gt; &lt;li&gt;Experience building downstream extracts using DHIC.&lt;/li&gt; &lt;li&gt; Able to create Microsoft SQL / ETL complex queries.&lt;/li&gt; &lt;li&gt;Experience on Updating Data Specifications&lt;/li&gt; &lt;li&gt;Experience on DataHub and InfoCenter Initial loads and Delta loads.&lt;/li&gt; &lt;li&gt;Experience on DataHub and InfoCenter Guidewire Commit and Rollback utility.&lt;/li&gt; &lt;li&gt;Experience on Extending entities &amp;amp; attributes in DataHub and InfoCenter&lt;/li&gt; &lt;li&gt;Experience on Ref &amp;amp; Config Data spreadsheet maintenance.&lt;/li&gt; &lt;li&gt;Experience on Scheduling loads, solving integrity check issues, and balancing errors.&lt;/li&gt; &lt;li&gt;Experience on performance improvements on SAP BODS workflows especially on SQL database as source.&lt;/li&gt; &lt;li&gt;Works with business in identifying detailed analytical and operational reporting/extracts requirements.&lt;/li&gt; &lt;li&gt;Must have at least one DHIC on-premises to DHIC Cloud Upgrade 
experience &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Good to have Skills:&lt;/p&gt; &lt;ul&gt; &lt;li&gt;SAP BODS administration, CICD.&lt;/li&gt; &lt;li&gt;Data Modelling Experience.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Mentioned above.&lt;/p&gt; &lt;p&gt;Mentioned above.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814692/guidewire-datahub-infocenter-developer-dhic-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814692/guidewire-datahub-infocenter-developer-dhic-at-zensar/</link>
  <title>[Full Time] Guidewire Datahub/infocenter Developer - Dhic at Zensar</title>
  <dc:date>Fri, 06 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814562/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Informatica MDM&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Accenture Delivery Methods (ADM)&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with business objectives and supports efficient data management practices. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also addressing any challenges that arise in the data design process. Roles &amp;amp; Responsibilities:- Expected to perform independently and become an SME.- Required active participation/contribution in team discussions.- Contribute in providing solutions to work related problems.- Engage in continuous learning to stay updated with industry trends and best practices in data architecture.- Collaborate with cross-functional teams to ensure data integration and consistency across applications. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Informatica MDM.- Good To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Accenture Delivery Methods (ADM).- Strong understanding of data modeling techniques and best practices.- Experience with data governance and data quality management.- Familiarity with cloud data storage solutions and integration methods. Additional Information:- The candidate should have minimum 3 years of experience in Informatica MDM.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814562/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814562/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Thu, 05 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814527/aws-data-architect-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Role &amp;amp; responsibilities&lt;/strong&gt; &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Relational SQL/ Caching expertise Deep knowledge of Amazon Aurora PostgreSQL, ElastiCache etc..&lt;/li&gt;&lt;li&gt;Data modeling Experience in OLTP and OLAP schemas, normalization, denormalization, indexing, and partitioning.&lt;/li&gt;&lt;li&gt;Schema design &amp;amp; migration  Defining best practices for schema evolution when migrating from SQL Server to PostgreSQL.&lt;/li&gt;&lt;li&gt;Data governance  Designing data lifecycle policies, archival strategies, and regulatory compliance frameworks.&lt;/li&gt;&lt;li&gt;AWS Glue &amp;amp; AWS DMS  Leading data migration strategies to Aurora PostgreSQL.&lt;/li&gt;&lt;li&gt;ETL &amp;amp; Data Pipelines  Expertise in Extract, Transform, Load (ETL) workflows . Glue jobs features and event-driven architectures.&lt;/li&gt;&lt;li&gt;Data transformation &amp;amp; mapping  PostgreSQL PL/pgSQL migration / transformation expertise while ensuring data integrity.&lt;/li&gt;&lt;li&gt;Cross-platform data integration  Connecting cloud and on-premises / other cloud data sources.&lt;/li&gt;&lt;li&gt;AWS Data Services  Strong experience in S3, Glue, Lambda, Redshift, Athena, and Kinesis.&lt;/li&gt;&lt;li&gt;Infrastructure as Code (IaC)  Using Terraform, CloudFormation, or AWS CDK for database provisioning.&lt;/li&gt;&lt;li&gt;Security &amp;amp; Compliance  Implementing IAM, encryption (AWS KMS), access control policies, and compliance frameworks (eg. 
GDPR ,PII).&lt;/li&gt;&lt;li&gt;Query tuning &amp;amp; indexing strategies  Optimizing queries for high performance.&lt;/li&gt;&lt;li&gt;Capacity planning &amp;amp; scaling  Ensuring high availability, failover mechanisms, and auto-scaling strategies.&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Preferred candidate profile&lt;/strong&gt; &lt;/p&gt;&lt;p&gt; &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Data Warehousing  Expertise in Amazon Redshift, Snowflake, or BigQuery.&lt;/li&gt;&lt;li&gt;Big Data Processing  Familiarity with Apache Spark, EMR, Hadoop, or Kinesis.&lt;/li&gt;&lt;li&gt;Data Lakes &amp;amp; Analytics  Experience in AWS Lake Formation, Glue Catalog, and Athena.&lt;/li&gt;&lt;li&gt;Machine Learning Pipelines  Understanding of SageMaker, BedRock etc. for AI-driven analytics.&lt;/li&gt;&lt;li&gt;CI/CD for Data Pipelines  Knowledge of AWS CodePipeline, Jenkins, or GitHub Actions.&lt;/li&gt;&lt;li&gt;Serverless Data Architectures  Experience with event-driven systems (SNS, SQS, Step Functions).&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814527/aws-data-architect-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814527/aws-data-architect-at-tata-consultancy/</link>
  <title>[Full Time] AWS Data Architect at Tata Consultancy</title>
  <dc:date>Thu, 05 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814702/cloud-native-app-developer-standard-at-infogain/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;p&gt; &lt;strong&gt; &lt;span&gt;Core Skills&lt;/span&gt; &lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Advanced cloud technologies and programming Developing and implementing cloud-based applications using frameworks such as Node.js or .NET Core Deep understanding of database technologies such as SQL Server or Oracle Knowledge of software development methodologies and best practices Familiarity with cloud-native architecture and DevOps practices Strong communication and collaboration skills&lt;/p&gt; &lt;/div&gt; EXPERIENCE &lt;ul&gt; &lt;li&gt;4.5-6 Years&lt;/li&gt; &lt;/ul&gt; SKILLS &lt;ul&gt; &lt;li&gt;Primary Skill: CNA Development&lt;/li&gt; &lt;li&gt;Sub Skill(s): CNA Development &lt;/li&gt; &lt;li&gt;Additional Skill(s): .NET Core, .NET Web API (restful APIs)&lt;/li&gt; &lt;/ul&gt; ABOUT THE COMPANY &lt;span&gt; &lt;p&gt;Infogain is a human-centered digital platform and software engineering company based out of Silicon Valley. We engineer business outcomes for Fortune 500 companies and digital natives in the technology, healthcare, insurance, travel, telecom, and retail &amp;amp; CPG industries using technologies such as cloud, microservices, automation, IoT, and artificial intelligence. We accelerate experience-led transformation in the delivery of digital platforms. 
Infogain is also a Microsoft (NASDAQ: MSFT) Gold Partner and Azure Expert Managed Services Provider (MSP).&lt;/p&gt; &lt;p&gt;Infogain, an Apax Funds portfolio company, has offices in California, Washington, Texas, the UK, the UAE, and Singapore, with delivery centers in Seattle, Houston, Austin, Kraków, Noida, Gurgaon, Mumbai, Pune, and Bengaluru.&lt;/p&gt; &lt;/span&gt; &lt;div&gt; &lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infogain&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814702/cloud-native-app-developer-standard-at-infogain/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814702/cloud-native-app-developer-standard-at-infogain/</link>
  <title>[Full Time] Cloud Native App Developer (Standard) at Infogain</title>
  <dc:date>Thu, 05 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814884/data-architect-data-at-happiest-minds/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Define and lead the enterprise migration strategy from SSAS (Multidimensional &amp;amp; Tabular) to Power Bl Semantic Models, ensuring minimal business disruption and long-term scalability.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Decompose complex MDX cubes into 23 domain-aligned Power Bl Semantic Models, ensuring functional equivalence, improved usability, and optimized performance.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Analyze existing SSAS multidimensional cube structures (dimensions, hierarchies, KPls, calculated members, perspectives) and translate them into optimized tabular/DAX-based models.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Lead MDX-to-DAX translation strategy, redesigning calculated members, scoped assignments, and cube scripts into efficient DAX measures and calculation groups.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Define target-state semantic architecture within Power Bl Service including certified datasets, shared semantic layers, thin report strategy, and workspace governance.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Establish migration playbooks including:&lt;/p&gt;&lt;p&gt;a.&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Cube inventory and dependency mapping&lt;/p&gt;&lt;p&gt;b.&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Feature parity analysis&lt;/p&gt;&lt;p&gt;c.&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Performance benchmarking (before vs after)&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Data validation and reconciliation framework&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Design 
enterprise modeling standards aligned with Kimball methodology, including conformed dimensions, SCD handling, surrogate keys, and cross-model dimension reuse.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Architect performance optimization strategies such as aggregations, composite models, hybrid tables, incremental refresh, and partitioning.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Provide architectural oversight for model development using tools like Tabular Editor and performance diagnostics with DAX Studio.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Define DevOps and CI/CD framework for semantic models using XMLA endpoints, version control (Git), and structured deployment pipelines.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Establish and enforce governance controls including RLS/OLS security models, workspace design, naming conventions, and metadata documentation standards.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Oversee user acceptance testing (UAT), reconciliation validation against legacy cube outputs, cutover planning, and post-migration stabilization support.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Desired Skills (Plus Points):&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Implementation experience with Azure Analysis Services (MDX).&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Command over MDX Language.&lt;/p&gt;&lt;p&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Certifications in Microsoft Bl tec&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data 
warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Happiest Minds&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814884/data-architect-data-at-happiest-minds/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814884/data-architect-data-at-happiest-minds/</link>
  <title>[Full Time] Data Architect - Data at Happiest Minds</title>
  <dc:date>Thu, 05 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814459/data-architect-at-wipro/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Technology Skills and Project Experience:&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp; 15+ years experience of the full software development lifecycle on BI projects and support experience.&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;10+ years experience on working on data objects pertaining both master and transactional data in Purchasing, Material, Sales Orders and Supply chain in SAP &amp;amp; Oracle ERP is mandatory&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 8+ years in Dimensional Data modelling experience (STAR &amp;amp; Snowflake)&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;Expertise in AWS services such as Redshift, Database Migration Service, Glue, S3, and Schema Conversion Tool.&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Any one data modeling tool experience is mandatory (Erwin or any other tool)&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;Need to be strong in SQL query writing and procedures&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;Experience in following components (or similar):&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; BI tools: Tableau, Oracle Business Intelligence, PowerBI&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;ETL tools: Informatica PowerCenter, PL/SQL, IICS&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;Data bases: RedShift, Oracle, PostgreSQL, Exasol,&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Other: object-oriented programming language (python)&lt;/p&gt;&lt;p&gt;-&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;Experience managing servers both Cloud and On-prem (ideally experienced in AWS&lt;strong&gt;Role &amp;amp; responsibilities&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Preferred candidate profile&lt;/strong&gt; &lt;/p&gt;&lt;h4&gt;Job 
Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Wipro&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814459/data-architect-at-wipro/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814459/data-architect-at-wipro/</link>
  <title>[Full Time] Data Architect at Wipro</title>
  <dc:date>Wed, 04 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814069/data-architect-at-altimetrik/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Exp:&lt;/strong&gt; 10+ Yrs Experience&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location&lt;/strong&gt;: BLR, CHENNAI, HYD, PUNE or Gurugram&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Roles and responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Interface with customer to gather requirements, design solutions &amp;amp; make recommendations&lt;/li&gt;&lt;li&gt;Lead customer project conference calls or interface with a Project Manager&lt;/li&gt;&lt;li&gt;Deliver Technical Specifications documents for customer review&lt;/li&gt;&lt;li&gt;Strong collaboration with team software engineer consultants onshore &amp;amp; offshore&lt;/li&gt;&lt;li&gt;Leverage understanding of data relationships and schemas to structure data to allow clients to perform dynamic customer-level analysis&lt;/li&gt;&lt;li&gt;Construct processes to build Customer ID mapping files for use in building 360 degree view of customer across data sources.&lt;/li&gt;&lt;li&gt;Leverage scripting languages to automate key processes governing data movement, cleansing, and processing activities&lt;/li&gt;&lt;li&gt;Bill &amp;amp; forecast time toward customer projects&lt;/li&gt;&lt;li&gt;Innovate on new ideas to solve customer needs&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Requirement: &lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;10+ years &lt;/strong&gt;of strong experience with data transformation &amp;amp; &lt;strong&gt;ETL &lt;/strong&gt;on large data sets&lt;/li&gt;&lt;li&gt;Experience with designing customer centric datasets (i.e., CRM, Call Center, Marketing, Offline, Point of Sale etc.)&lt;/li&gt;&lt;li&gt;5+ years of &lt;strong&gt;Data Modeling&lt;/strong&gt; experience (i.e., Relational, Dimensional, Columnar, Big Data)&lt;/li&gt;&lt;li&gt;5+ years of complex &lt;strong&gt;SQL or NoSQL experience&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Experience in advanced &lt;strong&gt;Data Warehouse 
concepts&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Experience in industry ETL tools (i.e., Informatica, Unifi)&lt;/li&gt;&lt;li&gt;Experience with Business Requirements definition and management, structured analysis, process design, use case documentation&lt;/li&gt;&lt;li&gt;Experience with &lt;strong&gt;Reporting Technologies &lt;/strong&gt;(i.e., Tableau, PowerBI)&lt;/li&gt;&lt;li&gt;Experience in professional software development&lt;/li&gt;&lt;li&gt;Demonstrate exceptional organizational skills and ability to multi-task simultaneous different customer projects&lt;/li&gt;&lt;li&gt;Strong verbal &amp;amp; written communication skills to interface with Sales team &amp;amp; lead customers to successful outcome&lt;/li&gt;&lt;li&gt;Must be self-managed, proactive and customer focused&lt;/li&gt;&lt;li&gt;Degree in Computer Science, Information Systems, Data Science, or related field&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Altimetrik&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814069/data-architect-at-altimetrik/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814069/data-architect-at-altimetrik/</link>
  <title>[Full Time] Data Architect at Altimetrik</title>
  <dc:date>2026-03-04T11:04:24+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814098/manager-data-operations-engineering-at-pfizer/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;b&gt; ROLE SUMMARY&lt;/b&gt; &lt;/p&gt; &lt;p&gt; Pfizer s purpose is to deliver breakthroughs that change patients lives. Research and Development is at the heart of fulfilling Pfizer s purpose as we work to translate advanced science and technologies into the therapies and vaccines that matter most. Whether you are in the discovery sciences, ensuring drug safety and efficacy or supporting clinical trials, you will apply cutting edge design and process development capabilities to accelerate and bring the best in class medicines to patients around the world. &lt;/p&gt; &lt;p&gt; The &lt;b&gt; Data Operations Lead is a hands on data engineering leader &lt;/b&gt; responsible for operating, stabilizing, and continuously improving a large scale enterprise data platform that provides trusted data to more than 400 AI and analytical solutions across Pfizer Global Supply. &lt;/p&gt; &lt;p&gt; This role leads a technical data operations team while remaining deeply involved in complex investigations, code reviews, and engineering decisions. The primary objective is to ensure data reliability, responsiveness, and trust at enterprise scale, by applying strong data engineering practices, enforcing coding and operational standards, and delivering predictable service outcomes for business critical analytics and AI workloads&lt;b&gt; . &lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; ROLE RESPONSIBILITIES&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Data Engineering Operations Leadership&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Manage a hands on data engineering operations team responsible for supporting production data pipelines, databases, and AI data products. 
Ensure issues are investigated and resolved using strong engineering discipline, clear ownership, and consistent technical standards&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Hands On Data Engineering &amp;amp; Troubleshooting&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Remain actively hands on in complex investigations involving Python code, SQL logic, data pipelines, transformations, and database behavior. Review code, debug data issues, validate fixes, and guide engineers toward durable solutions. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Engineering Root Cause Analysis &amp;amp; Prevention&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Drive deep technical root cause analysis across ingestion, transformation, and consumption layers. Ensure recurring issues are addressed through code improvements, refactoring, better validations, or architectural fixes, rather than temporary workarounds. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Engineering Standards, Code Quality &amp;amp; Reviews&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Define, enforce, and evolve data engineering coding standards, including Python and SQL best practices, version control discipline, and code review expectations. Ensure all operational fixes meet quality, reliability, and maintainability standards even under production pressure. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; SLA Ownership Through Engineering Excellence&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Define, implement, and improve SLAs for data operations by reducing manual intervention, improving automation, and raising engineering quality. 
Track operational performance and continuously improve response and resolution outcomes through engineering improvements. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; AI Application Front Line Support&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Serve as the front line technical leader for AI and data driven applications, supporting model outputs, data pipelines feeding AI solutions, feature/embedding generation, and downstream data consumers. Diagnose data related AI issues and ensure fixes align with engineering best practices. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Database &amp;amp; Platform Reliability&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Own operational reliability across data platforms and databases, including schema management, query performance, access patterns, and data correctness. Ensure production data behavior is well understood, monitored, and documented. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; Stakeholder Communication &amp;amp; Trust Restoration&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; Provide clear, technically grounded communication to stakeholders regarding data issues, impacts, and remediation actions. Set realistic expectations and rebuild trust through predictable execution, transparency, and engineering credibility. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Professional Experience and Educational Requirement&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt; Education / Experience&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Bachelor s degree (Master s preferred) in Computer Science, Data Engineering, or a related technical field. 
&lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; 5 - 10 years of hands on Data Engineering experience&lt;/b&gt; , including operating and supporting production data systems. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Experience leading or acting as a Technical Lead for Data engineering or Data operations teams. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Technical (Must Have)&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Strong hands on programming experience with one or more general purpose languages, including Python, SQL, Java, Scala, PySpark, C, C++, C#, Swift/Objective C, or JavaScript. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Proven experience with data preparation, ingestion, and ETL/ELT frameworks, such as Airflow, dbt, Fivetran, Kafka, Informatica, Talend, Alteryx, or equivalent technologies. &lt;/b&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong experience with software engineering best practices, including version control (Git, TFS, Subversion), CI/CD pipelines (Jenkins, Maven, Gradle, or similar), automated unit testing, and DevOps practices. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Hands on experience with cloud data platforms and storage technologies, such as Snowflake, Databricks, Amazon S3, Redshift, BigQuery, or equivalent platforms. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Demonstrated experience architecting and operating end to end data pipelines, using cloud based and/or on premises stacks. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Prior hands on experience as a data modeler is required, including dimensional modeling and analytical data model design. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong understanding of database management fundamentals, including schemas, tables, views, permissions, query performance, and operational troubleshooting. 
&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Proven ability to diagnose and resolve data quality issues at the engineering level, including logic errors, transformation issues, and source to target alignment. &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; Leadership &amp;amp; Ways of Working&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; Proven ability to lead a technical team while remaining hands on. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong problem solving skills with a bias toward engineering-driven fixes. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Ability to define and enforce SLAs in a technical operations environment. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Strong stakeholder communication skills, especially in high impact data incidents. &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt; PREFERRED QUALIFICATIONS&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; Experience supporting AI or analytics applications in production environments. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Experience operating data platforms in large scale or regulated enterprise environments. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Familiarity with ITIL aligned incident/problem management applied pragmatically within engineering teams. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Knowledge of cloud computing, machine learning, text analytics, NLP, and web based application architectures. &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; Knowledge of ontologies and graph databases (e. g. , Neo4j, Titan) and associated query languages is a plus. 
&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; and information from varied data sources, both new and pre-existing, into discernable insights and perspectives; takes a problem-solving approach by connecting analytical thinking with an understanding of business drivers&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Adaptable: &lt;/b&gt; Demonstrates flexibility in the face of shifting targets, thrives in new situations&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Pioneering:&lt;/b&gt; Pushes self and others to think about new innovation and digital frontiers and ways to conquer them&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Ambiguity Tolerant: &lt;/b&gt; Successfully navigates ambiguity to keep the organization on target and deliver against established timelines&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Exceptional Communicator:&lt;/b&gt; Can understand, translate, and distill the complex, technical findings of the team into commentary that facilitates effective decision making by senior leaders; can readily align interpersonal style with the individual needs of customers&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Highly Collaborative:&lt;/b&gt; Manages projects with and through others; shares responsibility and credit; develops self and others through teamwork; comfortable providing guidance and sharing expertise with others to help them develop their skills and perform at their best; helps others take appropriate risks; communicates frequently with team members earning respect and trust of the team &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Proactive Self-Starter&lt;/b&gt; : Takes an active role in one s own professional development; stays abreast of analytical trends, and cutting-edge applications of data&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;b&gt; Creative: &lt;/b&gt; Able to bring forth new ideas to improve our existing practices and takes calculated risks to innovate new 
capabilities within Business Analytics, with a focus on data products and analytics solutions&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; &lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt; NON-STANDARD WORK SCHEDULE, TRAVEL OR ENVIRONMENT REQUIREMENTS&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; 20% travel may be required based on delivery and project priorities&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;br&gt; Work Location Assignment: Hybrid&lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Information &amp;amp; Business Tech &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pfizer&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814098/manager-data-operations-engineering-at-pfizer/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814098/manager-data-operations-engineering-at-pfizer/</link>
  <title>[Full Time] Manager, Data Operations &amp; Engineering at Pfizer</title>
  <dc:date>2026-03-04T02:05:31+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814395/ux-designer-senior-at-infogain/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;p&gt;What you ll do&lt;/p&gt; &lt;p&gt;Lead design strategy for complex digital products with a mobile-first approach. Ensure a seamless, cohesive user experience across the UX ecosystem. Conduct usability testing and gather insights in collaboration with UX Researchers, leveraging both qualitative and quantitative methods. Drive the entire design process, from ideation to execution. Develop high-level design concepts, user flows, and interaction patterns to enhance usability. Effectively present projects to stakeholders and senior leadership. Foster a collaborative, inclusive team environment, encouraging innovation and shared ownership. Work closely with product managers, developers, and stakeholders within an Agile framework. Advocate for design best practices, accessibility, and usability standards. Solve complex usability challenges and continuously optimize interactions. Align design decisions with business objectives and technical constraints to ensure feasibility. Manage and evolve design frameworks, systems, and guidelines for consistency and scalability. Track and analyze UX metrics, using data-driven insights to refine and iterate on designs. What we re looking for At Charitable Impact s UX team, we embrace a thoughtful, empathetic, and user-first approach in everything we do. Our mindset is rooted in understanding and deep empathy. We seek team members who share our passion for making charitable giving a more engaging and integral part of society. charitableimpact.com Role Description: Senior UX Designer We re looking for a strategically minded self-starter who will champion our core values imagination, courage, kindness, respect, and generosity while fostering equitable and inclusive experiences for all. For this role specifically,&lt;/p&gt; &lt;p&gt;we are seeking:&lt;/p&gt; &lt;p&gt;5+ years of UX design experience with a strong track record of delivering impactful solutions. 
Undergraduate degree in a relevant discipline. A compelling portfolio showcasing expertise in both UX and UI design. Deep mastery of UX/UI principles, best practices, and industry-standard design tools (e.g., Figma, Abstract). Proficient in qualitative and quantitative research to inform design decisions. Experience working in Agile environments, using collaborative tools like JIRA and Confluence. Skilled in designing across multiple devices and platforms, ensuring high-quality, user-centric solutions. Strong visual and interaction design skills, with an eye for layout, typography, and visual hierarchy. Ability to balance business goals with user needs, driving meaningful product value. Proven success in cross-functional collaboration within dynamic, fast-paced environments. Data-informed decision-making, blending analytical insights with design intuition. Experience designing for iterative, experimental, and evolving features. Open-minded, egoless learner, receptive to constructive feedback and continuous improvement. Exceptional communication and presentation skills to articulate design decisions effectively. Highly proactive, organized, and adaptable, thriving in ambiguity. Self-sufficient while excelling in team collaboration, ensuring seamless execution in fast-moving projects.&lt;/p&gt; &lt;/div&gt; EXPERIENCE &lt;ul&gt; &lt;li&gt;6-8 Years&lt;/li&gt; &lt;/ul&gt; SKILLS &lt;ul&gt; &lt;li&gt;Primary Skill: UX Design&lt;/li&gt; &lt;li&gt;Sub Skill(s): UX Design &lt;/li&gt; &lt;li&gt;Additional Skill(s): UX Architect&lt;/li&gt; &lt;/ul&gt; ABOUT THE COMPANY &lt;span&gt; &lt;p&gt;Infogain is a human-centered digital platform and software engineering company based out of Silicon Valley. We engineer business outcomes for Fortune 500 companies and digital natives in the technology, healthcare, insurance, travel, telecom, and retail &amp;amp; CPG industries using technologies such as cloud, microservices, automation, IoT, and artificial intelligence. 
We accelerate experience-led transformation in the delivery of digital platforms. Infogain is also a Microsoft (NASDAQ: MSFT) Gold Partner and Azure Expert Managed Services Provider (MSP).&lt;/p&gt; &lt;p&gt;Infogain, an Apax Funds portfolio company, has offices in California, Washington, Texas, the UK, the UAE, and Singapore, with delivery centers in Seattle, Houston, Austin, Krak w, Noida, Gurgaon, Mumbai, Pune, and Bengaluru.&lt;/p&gt; &lt;/span&gt; &lt;div&gt; &lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infogain&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814395/ux-designer-senior-at-infogain/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814395/ux-designer-senior-at-infogain/</link>
  <title>[Full Time] UX Designer (Senior) at Infogain</title>
  <dc:date>2026-03-03T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814427/developer-etl-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;strong&gt;Development / ETL Developer&lt;/strong&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;The ETL Developer will play a critical role in designing, building, and maintaining integration process flows for the &lt;strong&gt;Digital Integration Hub&lt;/strong&gt; (DIH), centralized integration platform for Trading and Asset Services. This role is focused on ETL development, data transformation, API integrations, and secure, scalable workflow design using industry leading tool, Adeptia. &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;You will work closely with Product Owners, Business Analysts, QA, DevOps, and the Scrum Master to deliver highquality, secure, and reliable integration solutions that support DIH s strategic roadmap. &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;strong&gt;Position Responsibilities:&lt;/strong&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Design, develop, test, and maintain DIH integration connectors using ETL tool Adeptia. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Build and optimize ETL workflows that extract, transform, validate, and load data across internal and external systems. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Implement API integrations (REST/SOAP) using modern authentication methods (OAuth, JWT, API keys). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Map source and destination fields, define transformation logic, and ensure data accuracy and consistency. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Write clean, maintainable, and secure integration logic following development standards. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Create process flows, sequence diagrams, and technical documentation to support development and maintenance. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Assist engineering teams with design changes and architectural improvements. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Collaborate with QA engineers to define acceptance criteria, test strategies, and quality gates. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Support automated testing and CI/CD pipeline improvements. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Troubleshoot defects, identify root causes, and implement corrective actions. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Ensure all integration workflows follow secure coding practices and security standards. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Support security reviews, audits, and remediation activities throughout the development lifecycle. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Collaborate with DevOps on deployments, environment configuration, and pipeline automation. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Provide technical input for release notes, documentation, and training materials. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;strong&gt;Position Requirements: &lt;/strong&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Bachelor s degree in software engineering, computer science or other related discipline or the equivalent combination of education, training, or work experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Experience designing complex multi step ETL pipelines with branching logic, conditional flows, and error handling using tools such as Adeptia, Azure Data Factory, Informatica, or similar platforms (&lt;strong&gt;Adeptia preferred&lt;/strong&gt;) &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Strong SQL skills and familiarity with JavaScript, or equivalent programming language &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Familiarity with secure coding practices, especially around data movement and API consumption &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Knowledge of CI/CD pipelines, automated testing, and DevOps practices &lt;/span&gt; &lt;/li&gt; 
&lt;li&gt; &lt;span&gt;Understanding of data modeling concepts, including schemas, transformations, and data lineage &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt;Ideal candidate has previous experience in the FinTech industry with exposure to financial markets, private equity, or investment accounting&lt;/strong&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Knowledge of end-to-end software development life cycles, e.g., Agile, SAFeAgile &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Strong interpersonal communication skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Excellent written, verbal communication and presentation skills &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Ability to collaborate with globally distributed teams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Strong analytical, problem-solving, and time management skills &lt;/span&gt; &lt;br&gt; &lt;span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;strong&gt;About Digital Integration Hub&lt;/strong&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Digital Integration Hub (DIH) team is structured horizontally across Trading &amp;amp; Asset Services business and aims to build and rebuild digital connectors between internal products as well as third-party partners. DIH seeks to integrate products and solutions using modern integration standards and best practices. DIH works with business/solution leaders responsible for key ecosystems to meet integration requirements aimed at linking products and solutions to provide a more automated, unified experience for our clients. DIH is a centralized home for strategic integrations, established to build, enhance, maintain, and support strategic integrations. 
&lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Nagar&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814427/developer-etl-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814427/developer-etl-at-zensar/</link>
  <title>[Full Time] Developer ETL at Zensar</title>
  <dc:date>2026-03-03T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816789/datawarehouse-architect-at-icici-bank/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Essential Services: Role &amp;amp; Location fungibility&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;The role descriptions give you an overview of the responsibilities; it is only directional and guiding in nature. At ICICI Bank, we believe in serving our customers beyond our role definition, product boundaries, and domain limitations through our philosophy of customer 360-degree. In essence, this captures our belief in serving the entire banking needs of our customers as One Bank, One Team. To achieve this, employees at ICICI Bank are expected to be role and location-fungible with the understanding that Banking is an essential service.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;About the Role:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;As a Data Warehouse Architect, you will be responsible for managing and enhancing data warehouse that manages large volume of customer-life cycle data flowing in from various applications within guardrails of risk and compliance.&lt;strong&gt;&lt;/strong&gt;You will be managing the day-to-day operations of data warehouse i.e. Vertica. In this role responsibility, you will manage a team of data warehouse engineers to develop data modelling, designing ETL data pipeline, issue management, upgrades, performance fine-tuning, migration, governance and security framework of the data warehouse. This role enables the Bank to maintain huge data sets in a structured manner that is amenable for data intelligence. The data warehouse supports numerous information systems used by various business groups to derive insights.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;As a natural progression, the data warehouses will be gradually migrated to Data Lake enabling better analytical advantage. 
The role holder will also be responsible for guiding the team towards this migration.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Key Responsibilities:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Data Pipeline Design:&lt;/strong&gt; Responsible for designing and developing ETL data pipelines that can help in organising large volumes of data. Use of data warehousing technologies to ensure that the data warehouse is efficient, scalable, and secure.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Issue Management&lt;/strong&gt;: Responsible for ensuring that the data warehouse is running smoothly. Monitor system performance, diagnose and troubleshoot issues,&lt;strong&gt;&lt;/strong&gt;and make necessary changes to optimize system performance.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Collaboration:&lt;/strong&gt; Collaborate with cross-functional teams to implement upgrades, migrations and continuous improvements.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Integration and Processing&lt;/strong&gt;: Responsible for processing, cleaning, and integrating large data sets from various sources to ensure that the data is accurate, complete, and consistent.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Modelling&lt;/strong&gt;: Responsible for designing and implementing data modelling solutions to ensure that the organizations data is properly structured and organized for analysis.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Key Qualifications &amp;amp; Skills:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Education Qualification:&lt;/strong&gt; B.E./B. Tech. 
in Computer Science, Information Technology or equivalent domain with 10 to 12 years of experience and at least 5 years or relevant work experience in Datawarehouse/mining/BI/MIS.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience in Data Warehousing:&lt;/strong&gt; Knowledge on ETL and data technologies and outline future vision in OLTP, OLAP (Oracle / MSSQL). Data Modelling, Data Analysis and Visualization experience (Analytical tools experience like Power BI / SAS / ClickView / Tableu etc). Good to have exposure to Azure Cloud Data platform services like COSMOS, Azure Data Lake, Azure Synapse, and Azure Data factory.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Synergize with the Team:&lt;/strong&gt; Regular interaction with business/product/functional teams to create mobility solutions.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Certification:&lt;/strong&gt; Azure certified DP 900, PL 300, DP 203 or any other Data platform/Data Analyst certifications.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;ICICI Bank&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816789/datawarehouse-architect-at-icici-bank/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816789/datawarehouse-architect-at-icici-bank/</link>
  <title>[Full Time] Datawarehouse Architect at ICICI Bank</title>
  <dc:date>2026-03-03T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814319/database-architect-at-icertis/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt;Design scalable, secure, and highly available architectures using Azure SQL Database, Managed Instance, and SQL Server on Azure VM.&lt;/li&gt; &lt;li&gt;Define database standards, reference architectures, and best practices.&lt;/li&gt; &lt;li&gt;Architect HA/DR solutions using Failover Groups, Active Geo-replication, Zone Redundancy, and backup strategies.&lt;/li&gt; &lt;li&gt;Lead performance engineering, capacity planning, and scalability strategies.&lt;/li&gt; &lt;li&gt;Design and enforce database security (Azure AD, TDE, Always Encrypted, Auditing, Defender for SQL).&lt;/li&gt; &lt;li&gt;Experience with performance tuning and optimization, using native monitoring and troubleshooting tools.&lt;/li&gt; &lt;li&gt;Lead database migration and modernization initiatives to Azure.&lt;/li&gt; &lt;li&gt;Drive automation and DevOps using PowerShell, Azure Automation, CI/CD, and Infrastructure as Code.&lt;/li&gt; &lt;li&gt;Implement monitoring, alerting, and lead root cause analysis (RCA).&lt;/li&gt; &lt;li&gt;Optimize Azure SQL costs using right-sizing, elastic pools, and reserved capacity.&lt;/li&gt; &lt;li&gt;Act as SME and escalation point, mentoring DBAs and guiding development teams.&lt;/li&gt; &lt;li&gt;Adopt AI tools /Agents/third party tools for DB operations &lt;/li&gt; &lt;li&gt;Cross team collaboration, SOP documentation &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt;Design scalable, secure, and highly available architectures using Azure SQL Database, Managed Instance, and SQL Server on Azure VM.&lt;/li&gt; &lt;li&gt;Define database standards, reference architectures, and best practices.&lt;/li&gt; &lt;li&gt;Architect HA/DR solutions using Failover Groups, Active Geo-replication, Zone Redundancy, and backup strategies.&lt;/li&gt; &lt;li&gt;Lead performance engineering, capacity planning, and scalability strategies.&lt;/li&gt; &lt;li&gt;Design and enforce database security (Azure AD, TDE, 
Always Encrypted, Auditing, Defender for SQL).&lt;/li&gt; &lt;li&gt;Experience with performance tuning and optimization, using native monitoring and troubleshooting tools.&lt;/li&gt; &lt;li&gt;Lead database migration and modernization initiatives to Azure.&lt;/li&gt; &lt;li&gt;Drive automation and DevOps using PowerShell, Azure Automation, CI/CD, and Infrastructure as Code.&lt;/li&gt; &lt;li&gt;Implement monitoring, alerting, and lead root cause analysis (RCA).&lt;/li&gt; &lt;li&gt;Optimize Azure SQL costs using right-sizing, elastic pools, and reserved capacity.&lt;/li&gt; &lt;li&gt;Act as SME and escalation point, mentoring DBAs and guiding development teams.&lt;/li&gt; &lt;li&gt;Adopt AI tools /Agents/third party tools for DB operations &lt;/li&gt; &lt;li&gt;Cross team collaboration, SOP documentation &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;SQL Server Database Administration, 8+ Years exp.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Azure certifications (Azure Database Administrator, Azure Solutions Architect).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience working with FinOps and cloud cost governance.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Exposure to DevOps and IaC frameworks.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience supporting mission-critical, high-availability systems.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;SQL Server Database Administration, 8+ Years exp.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Azure certifications (Azure Database Administrator, Azure Solutions Architect).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience working with FinOps and cloud cost governance.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Exposure to DevOps and IaC frameworks.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience supporting mission-critical, high-availability systems.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; DBA Architect to design, govern, and optimize enterprise database platforms with a strong focus on Azure 
SQL services. This role owns database architecture, high availability, performance, security, automation, and cost optimization, while partnering with application, cloud, and security teams. &lt;div&gt; &lt;p&gt; &lt;span&gt;Who we a&lt;/span&gt; &lt;span&gt;re:&lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt;Icertis is the only contract intelligence platform companies trust to keep them out in front, now and in the future. Our unwavering commitment to contract intelligence is grounded in our FORTE values Fairness, Openness, Respect, Teamwork and Execution which guide all our interactions with employees, customers, partners, and stakeholders. Because in our mission to be the contract intelligence platform of the world, we believe how we get there is as important as the destination.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt;Icertis, Inc. provides Equal Employment Opportunity to all employees and applicants for employment without regard to race, color, religion, gender identity or expression, sex, sexual orientation, national origin, age, disability, genetic information, marital status, amnesty, or status as a covered veteran in accordance with applicable federal, state and local laws. Icertis, Inc. complies with applicable state and local laws governing non-discrimination in employment in every location in which the company has facilities. If you are in need of accommodation or special assistance to navigate our website or to complete your application, please send an e-mail with your request to careers@icertis.com or get in touch with your recruiter.&lt;/p&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;p&gt;Icertis is the global leader in AI-powered contract intelligence. The Icertis platform revolutionizes contract management, equipping customers with powerful insights and automation to grow revenue, control costs, mitigate risk, and ensure compliance - the pillars of business success. 
Today, more than one third of the Fortune 100 trust Icertis to realize the full intent of millions of commercial agreements in 90+ countries.&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;8+ years of experience in SQL Server / Database Administration, including architecture responsibilities.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong hands-on expertise with Azure SQL Database, Managed Instance, Hyperscale.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Deep knowledge of HA/DR, performance tuning, and security.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience with Azure networking (Private Endpoints, VNets).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Deeps understanding of SQL performance tuning &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience supporting mission-critical, enterprise-scale systems.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong scripting/automation skills (PowerShell /Python preferred).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;knowledge on AI tools/Agents is plus&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience handling tools such as Redgate, Datadog etc&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Icertis&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814319/database-architect-at-icertis/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814319/database-architect-at-icertis/</link>
  <title>[Full Time] Database Architect at Icertis</title>
  <dc:date>2026-03-03T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814540/dea-core-on-premise-reporting-professional-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt;The role requires strong technical expertise in Cognos to deliver reporting solutions and provide technical support for the business users. It includes serving as the point of contact for the business, collaborating closely with business users, team members and external technical teams. Role holder is expected to:&lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Possess strong investigative and analytical qualities&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Be educated to degree level or equivalent industry experience and it is desirable to have has an appropriate certification relating to the technical specialism that the role requires.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Have excellent communication skills in English - both written and verbal.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Be a team player with a positive and constructive attitude towards work and colleagues.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Apply a diligent approach to completing work on time and to an agreed, acceptable level of quality &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;IBM Cognos 10+, Tableau (good to have but not essential)&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Experience of Oracle PL/SQL, Star schema design (Kimball methodology).&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Good understanding of Kimball methodology / star schema design.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Experience in development of data solutions for financial services.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Cognos expert responsible for design and development of reports.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; 
&lt;p&gt; &lt;span&gt;Deliver Cognos reports in line with agreed requirements and quality standards&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Participate in a cross functional teamwork, following Agile principles with responsibility for the end-to-end process, delivering within expected time. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;To follow standard Agile working practices:&lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Participate in all the daily and weekly ceremonies in your team&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Work with users from business teams to understand the requirements and develop appropriately innovative and practical solutions.&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Work closely with ETL, QA and other developers to build data solutions.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Manager&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814540/dea-core-on-premise-reporting-professional-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814540/dea-core-on-premise-reporting-professional-at-zensar/</link>
  <title>[Full Time] DE&amp;A - Core - On Premise Reporting Professional at Zensar</title>
  <dc:date>2026-03-03T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816749/lead-application-development-engineering-associate-at-ncr-corporation/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; Key Responsibilities &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Develop and support Oracle EBS CRM modules, including Service Requests (SR), Tasks, Incidents, Contact, Customer, Site, and related components. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Customize and enhance CRM workflows, D2K forms, reports, APIs, and business events. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Develop PL/SQL packages, procedures, views, and performance optimized CRM queries. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Build and support integrations between EBS CRM Fusion, using REST/SOAP services &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Perform issue analysis, debugging, and resolution for CRM-related defects. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Collaborate with functional teams to translate CRM business requirements into technical designs. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Prepare technical design documents, test cases, and release deployment artifacts. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; Required Skills &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Bachelor s degree in computer science, IT, or related discipline. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; 4 5 years of hands-on experience in Oracle EBS R12 CRM modules. 
&lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong experience with CRM components: &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Service Request (SR), Tasks, Incidents &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Customer/Sites &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong skills in: &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; PL/SQL &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Oracle Forms, Workflows ,AOL objects &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; XML Publisher / BI Publisher &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Web services integrations &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Good understanding of Oracle EBS CRM architecture, tables, and APIs. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Experience with Oracle Fusion Cloud CRM (basic to intermediate). &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Good understanding of Unix and shell scripting. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Experience in performance tuning of CRM modules. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; Preferred/Good-to-Have Skills &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Experience with OA Framework (OAF). &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Knowledge of D2K forms development. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Exposure to OIC integrations. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Business Events &amp;amp; Workflow customizations in CRM. 
&lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Experience with CRM-related integrations (e.g., CTI, third-party service tools). &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; Soft Skills &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong communication and problem-solving skills. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Ability to work independently and collaboratively with functional/business users. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Attention to detail and strong ownership mindset. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;strong&gt; &lt;/strong&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;NCR Corporation&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816749/lead-application-development-engineering-associate-at-ncr-corporation/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816749/lead-application-development-engineering-associate-at-ncr-corporation/</link>
  <title>[Full Time] Lead Application Development Engineering Associate at NCR Corporation</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814203/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814203/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814203/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814062/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814062/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814062/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814202/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Database Architecture&lt;br&gt; &lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time educationDatabase SpecialistCareer Level:8Location:Mumbai, Pune, BangaloreAbout the RoleAs a Database Specialist, you will play a critical role in designing and optimizing data architectures that power business applications. Your day will involve defining and evolving logical and physical database models, monitoring and tuning performance through indexing and query optimization, and implementing robust backup strategies. You ll collaborate closely with application teams, DevOps, and infrastructure to ensure seamless integration and support for enterprise systems. This role also demands expertise in ETL processes, data modeling, and performance tuning, along with familiarity with distributed computing, cloud-based databases, and modern storage architectures.Responsibilities Define and evolve logical and physical database models contribute to system data architecture decisions. Monitor and tune databases for optimal performance, including indexing, query optimization, and resource management. Design and implement robust backup and recovery strategies. Architect and maintain IDVS metadata and configuration databases. Optimize queries for real-time document validation and reporting. Implement secure storage and indexing strategies for digitized documents. Ensure compliance with data governance and audit requirements. 
Collaborate with application teams, DevOps, and infrastructure to ensure seamless integration and support for business applications. Manage ETL processes, data modelling, and performance tuning for large-scale systems. Work with distributed computing environments and cloud-based database/storage architectures.Required Skills Database Design &amp;amp; Modeling (5-7 years):Expertise in logical and physical modeling, schema design, and normalization. Performance Optimization (4-6 years):Strong skills in indexing, query tuning, and resource management. Backup &amp;amp; Recovery (3-5 years):Experience implementing secure and reliable strategies. ETL &amp;amp; Data Integration (3-5 years):Hands-on experience with ETL tools and data pipelines. Cloud &amp;amp; Distributed Systems (2-4 years):Familiarity with Azure SQL, AWS RDS, or similar platforms. Collaboration &amp;amp; DevOps (2-3 years):Ability to work with cross-functional teams and CI/CD environments. Strong knowledge of API-based data exchange and SFTP integrations.&lt;br&gt;Qualifications Bachelor&apos;s or Master&apos;s degree in Computer Science, Information Systems, or related field. 6-8 years of experience in database administration and architecture. 
Exposure to enterprise-scale systems and cloud-based database solutions preferred.&lt;br&gt; Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814202/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814202/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814199/sql-dba-sql-server-dba-performance-tuning-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;strong&gt;Role-SQL DBA (SQL Server DBA + Performance tuning)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience- 5 to 9 Years.&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location- Delhi / Hyderabad / Mumbai / Bangalore&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;***Virtual Interview***&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Required Technical Skill Set-&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Experience on SQL Server DBA along with cloud (azure and AWS) Proficient on DBA Performance tuning experience&lt;/li&gt;&lt;li&gt;Experience to design, implement, and maintain high-performance SQL Server databases, ensuring optimal performance, availability, and reliability&lt;/li&gt;&lt;li&gt;Perform in-depth performance tuning and optimization, identifying and resolving performance bottlenecks, slow queries, query tuning and other related issues&lt;/li&gt;&lt;li&gt;Analyze wait events and proactively address performance challenges to ensure smooth and efficient database operations&lt;/li&gt;&lt;li&gt;Oversee and manage database replication, always-on strategies, including setup, configuration, monitoring, and troubleshooting of environments&lt;/li&gt;&lt;li&gt;Monitor database health using various tools, set up alerting mechanisms, and respond to incidents in a timely manner to minimize downtime and data loss&lt;/li&gt;&lt;li&gt;Implement and manage Change Data Capture (CDC) processes to enable real-time data integration and synchronization across databases&lt;/li&gt;&lt;li&gt;Work closely with cross-functional and cloud teams to plan and execute database upgrades, migrations, and patching while minimizing risks and downtime&lt;/li&gt;&lt;li&gt;Documenting database configurations, processes, and best practices, contributing to the creation and maintenance of internal knowledge resources&lt;/li&gt;&lt;li&gt;Documenting performance tuning issues and resolution steps, contributing to the internal knowledge resources&lt;/li&gt;&lt;li&gt;Stay up-to-date with the latest industry trends, emerging technologies, and best practices in SQL Server database administration.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814199/sql-dba-sql-server-dba-performance-tuning-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814199/sql-dba-sql-server-dba-performance-tuning-at-tata-consultancy/</link>
  <title>[Full Time] Sql Dba (sql Server Dba + Performance Tuning) at Tata Consultancy</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814196/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with PySpark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with MLflow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniques. Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool).Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814196/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814196/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814065/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Power Business Intelligence (BI)&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;-Advanced Visualization Lead (Power BI) As an application lead, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data model and reporting ensuring that the report architecture aligns with business objectives and supports efficient data management practices. 
You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, contributing to the overall success of the projects you are involved in.Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing and mentoring within the team to enhance overall performance.- Develop and maintain documentation related to data architecture and design processes.Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Microsoft Power Business Intelligence (BI).- Strong analytical, problem-solving skills to interpret complex data sets and derive meaningful insights. -Strong development skills in MS PBI along with expertise in Dax expressions, SQL.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud data storage solutions and architectures.- Ability to analyze and optimize performance improvements.&lt;br&gt;Additional Information:The candidate should have minimum 7.5 years of experience in Microsoft Power Business Intelligence (BI).A 15-year full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full 
time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814065/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814065/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814168/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration.&lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Data Architecture Principles&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture supports the applications functionality and performance needs, while also considering scalability and security aspects. Engaging with stakeholders, you will gather requirements and translate them into effective data solutions, contributing to the overall success of the project. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Develop and maintain comprehensive documentation of data architecture and design decisions.- Mentor junior professionals in best practices and emerging trends in data architecture. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Data Architecture Principles.- Strong understanding of data modeling techniques and methodologies.- Experience with data integration tools and ETL processes.- Familiarity with cloud data architecture and services.- Knowledge of database management systems and data warehousing concepts. Additional Information:- The candidate should have minimum 7.5 years of experience in Data Architecture Principles.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814168/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814168/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814071/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with PySpark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with MLflow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniques. Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool).Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814071/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814071/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814103/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with PySpark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with MLflow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniques. Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool).Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814103/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814103/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814147/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with PySpark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with MLflow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniques. Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool).Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814147/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814147/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814046/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Analytics Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly and efficiently throughout the organization, while also addressing any challenges that arise in the data management process. Your role will be pivotal in shaping the data landscape of the organization, enabling informed decision-making and strategic planning. 
Roles &amp;amp; Responsibilities:A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposalB.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise)C.Analyze and assess the impact of the requirements on the data and its lifecycleD.Lead Big data architecture and design medium-big Cloud based, Big Data and Analytical Solutions using Lambda architecture.E.Breadth of experience in various client scenarios and situationsF.Experienced in Big Data Architecture-based sales and deliveryG.Thought leadership and innovationH.Lead creation of new data assets &amp;amp; offeringsI.Experience in handling OLTP and OLAP data workloads Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;A.Strong experience in Azure is preferred with hands-on experience in two or more of these skills :Azure Synapse Analytics, Azure HDInsight, Azure Databricks with PySpark / Scala / SparkSQL, Azure Analysis ServicesB.Experience in one or more Real-time/Streaming technologies including:Azure Stream Analytics, Azure Data Explorer, Azure Time Series Insights, etc.C.Experience in handling medium to large Big Data implementationsD.Candidate must have around 5 years of extensive Big data experienceE.Candidate must have 15 years of IT experience and around 5 years of extensive Big data experience (design + build) &lt;br&gt;Additional Information:A.Should be able to drive the technology design meetings, propose technology design and architecture B.Should have excellent client communication skillsC.Should have good analytical and problem-solving skills&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: 
&lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814046/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814046/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814129/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814129/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814129/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814118/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814118/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814118/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814139/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Machine Learning&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture supports the application's functionality and performance needs, while also considering scalability and security aspects. &lt;li&gt;Roles &amp;amp; Responsibilities:&lt;br&gt;&lt;/li&gt;&lt;li&gt;Expected to be a Subject Matter Expert with deep knowledge and experience.&lt;/li&gt;&lt;li&gt;Should have influencing and advisory skills.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate workshops and discussions to gather requirements and feedback from stakeholders.&lt;/li&gt;&lt;li&gt;Continuously evaluate and improve data architecture practices to enhance efficiency and effectiveness. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;br&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Machine Learning.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and methodologies.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions.&lt;/li&gt;&lt;li&gt;Ability to design and implement data governance frameworks. &lt;br&gt;&lt;/li&gt;&lt;li&gt;Additional Information:&lt;br&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 15 years of experience in Machine Learning.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814139/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814139/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814663/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814663/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814663/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814671/oracle-epm-planning-usi-lead-developer-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Work you'll do &lt;br&gt; As a Consultant in Clients Planning/EPBCS/PBCS/PCMCS team, you will: &lt;br&gt; Lead the design and development of Hyperion Planning, Essbase, PBCS, EPBCS, and PCMCS applications &lt;br&gt; Drive requirement gathering, delivery planning, and risk assessment for complex client engagements &lt;br&gt; Oversee technical delivery and ensure the highest quality of project outcomes &lt;br&gt; Proactively identify, address, and escalate issues to maintain project momentum &lt;br&gt; Guide project activities from initial requirements through Hypercare support &lt;br&gt; Collaborate closely with leads, onsite teams, and stakeholders to resolve challenges and optimize solutions &lt;br&gt; Work independently and mentor junior team members with technical and functional direction Qualifications &lt;br&gt; Must Have: &lt;br&gt; 4-6 years experience with Hyperion Planning, Essbase, Oracle PBCS/EPBCS &lt;br&gt; Proven project delivery in EPM Suite 11.x, PBCS, EPBCS, PCMCS &lt;br&gt; Advanced ASO/BSO cube development and MDX calculation expertise &lt;br&gt; Skilled in creating and customizing business and allocation rules &lt;br&gt; Strong capability in building complex reports via Financial Reporting Studio/Web Studio &lt;br&gt; Integration experience with FDMEE/Data Management &lt;br&gt; Ability to conduct cost-benefit analysis for solution design and delivery &lt;br&gt; Proficient in user provisioning and security setup in Shared Services &lt;br&gt; Good to Have: &lt;br&gt; Experience with prototyping, enhancements, and performance tuning &lt;br&gt; Familiarity with EPM Automate, Batch/Shell, and Groovy scripting &lt;br&gt; Knowledge of data and metadata management in EPBCS/PBCS &lt;br&gt; Exposure to EPRCS or third-party reporting tools Education: CA/BE/B.Tech/M.C.A/M.Sc (CS) degree or equivalent from accredited university&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software 
Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814671/oracle-epm-planning-usi-lead-developer-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814671/oracle-epm-planning-usi-lead-developer-at-cirruslabs/</link>
  <title>[Full Time] Oracle Epm Planning - Usi Lead Developer at Cirruslabs</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814417/regular-xceptor-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt;Our client is a leading commodity trading and logistics company.They are committed to building and maintaining world-class IT applications and infrastructure.The Trading IT group directly supports the trading business, and this business has started a far-reaching programme to enhance and improve its trading applications using an innovative architecture to support business growth across the full range of business lines and geographies, and to enable the sharing of systems across different businesses.&lt;/p&gt;&lt;p&gt;This programme is aimed at delivering functional capabilities, enhancements, and technical infrastructure upgrades to enable continued business growth and enhanced profitability for the firm.Client is looking to replace existing reconciliation system Gresham with Exceptor which will be enterprise-wide recon platform across FO, MO and BO &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;div&gt;&lt;p&gt;Key Responsibilities &lt;/p&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Solution Design Development &lt;/li&gt;&lt;li&gt;Analyze business requirements and translate them into technical solutions using Xceptor. &lt;/li&gt;&lt;li&gt;Design and develop data ingestion, transformation, and validation workflows. &lt;/li&gt;&lt;li&gt;Create rule-based logic to manage data exceptions and automate decision-making. &lt;/li&gt;&lt;li&gt;Data Integration &lt;/li&gt;&lt;li&gt;Integrate Xceptor with various data sources such as flat files (CSV, XML, Excel), databases, APIs, or third-party systems. &lt;/li&gt;&lt;li&gt;Configure ingestion layers for structured and unstructured data. &lt;/li&gt;&lt;li&gt;Ensure compatibility and smooth data flow between upstream/downstream systems. 
&lt;/li&gt;&lt;li&gt;Workflow Automation &lt;/li&gt;&lt;li&gt;Build automated workflows for business processes (e.g., reconciliations, client onboarding, trade validations). &lt;/li&gt;&lt;li&gt;Develop exception handling and error management within the workflows. &lt;/li&gt;&lt;li&gt;Testing Validation &lt;/li&gt;&lt;li&gt;Perform unit testing of developed workflows and rules. &lt;/li&gt;&lt;li&gt;Work with QA teams to support system integration testing (SIT) and user acceptance testing (UAT). &lt;/li&gt;&lt;li&gt;Ensure outputs meet business data quality standards. &lt;/li&gt;&lt;li&gt;Deployment Support &lt;/li&gt;&lt;li&gt;Assist in promoting solutions from development to production environments. &lt;/li&gt;&lt;li&gt;Provide post-deployment support, including defect fixing and performance tuning. &lt;/li&gt;&lt;li&gt;Monitor workflows and troubleshoot issues in production. &lt;/li&gt;&lt;li&gt;Documentation Reporting &lt;/li&gt;&lt;li&gt;Create and maintain technical documentation for developed workflows, transformations, and rules. &lt;/li&gt;&lt;li&gt;Prepare user manuals and training guides for business users. &lt;/li&gt;&lt;li&gt;Build reporting dashboards if required. &lt;/li&gt;&lt;li&gt;Collaboration &lt;/li&gt;&lt;li&gt;Work closely with business analysts, data analysts, and project managers to align technical implementations with business needs. &lt;/li&gt;&lt;li&gt;Liaise with infrastructure and security teams to ensure compliance with enterprise policies. &lt;/li&gt;&lt;li&gt;Continuous Improvement &lt;/li&gt;&lt;li&gt;Identify opportunities for process optimization or reuse of components. &lt;/li&gt;&lt;li&gt;Stay updated on new Xceptor features and best practices to improve efficiency and maintainability. 
&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/div&gt;&lt;p&gt;Must have &lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;Preferred Experience &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) 10+ years of overall experience out of which at least 2 years on Xceptor &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;b) If not many Xceptor recon resources available then we can consider resources with Smartstream TLM, Intellimatch, Duco experience &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Database and Data Handling &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) Strong skills in working with RDBMS (e.g., Oracle, SQL Server, PostgreSQL) &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;b) Experience in handling large datasets and data reconciliation &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;ETL and Data Transformation &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) Understanding of ETL concepts and tools (Xceptor is often used as an ETL tool) &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;b) Experience with data cleansing, validation, and transformation logic &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;SDLC and Agile Methodologies &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) Experience working in Agile/Scrum environments &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;b) Exposure to DevOps and CI/CD pipelines (JIRA, Jenkins, etc.) 
&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Problem Solving and Debugging &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) Ability to debug workflows and resolve complex data transformation issues &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Communication and Collaboration &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;a) Strong communication skills to interact with BA, QA, and business stakeholders &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;b) Ability to translate business requirements into technical workflows &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;An ideal candidate will also have expertise in some or all of the following: &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Jira or a similar issue-tracking systems &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814417/regular-xceptor-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814417/regular-xceptor-developer-at-luxoft/</link>
  <title>[Full Time] Regular Xceptor Developer at Luxoft</title>
  <dc:date>2026-03-02T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814190/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814190/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814190/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814670/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814670/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814670/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814431/cloud-native-app-developer-standard-at-infogain/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;p&gt; &lt;strong&gt; &lt;span&gt;Core Skills&lt;/span&gt; &lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Advanced cloud technologies and programming Developing and implementing cloud-based applications using frameworks such as Node.js or .NET Core Deep understanding of database technologies such as SQL Server or Oracle Knowledge of software development methodologies and best practices Familiarity with cloud-native architecture and DevOps practices Strong communication and collaboration skills&lt;/p&gt; &lt;/div&gt; EXPERIENCE &lt;ul&gt; &lt;li&gt;4.5-6 Years&lt;/li&gt; &lt;/ul&gt; SKILLS &lt;ul&gt; &lt;li&gt;Primary Skill: CNA Development&lt;/li&gt; &lt;li&gt;Sub Skill(s): CNA Development &lt;/li&gt; &lt;li&gt;Additional Skill(s): .NET Core, .NET Web API (RESTful APIs)&lt;/li&gt; &lt;/ul&gt; &lt;span&gt; &lt;/span&gt; &lt;div&gt; &lt;div&gt;Job for Automation&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infogain&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814431/cloud-native-app-developer-standard-at-infogain/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814431/cloud-native-app-developer-standard-at-infogain/</link>
  <title>[Full Time] Cloud Native App Developer (Standard) at Infogain</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814091/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814091/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814091/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814547/nas-impl-ice-business-analyst-at-adp/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Job Description Business Analysts will be responsible for collecting requirements from client and vendor for NAS products like Enterprise, Vantage ADPR, Vantage UVI, SDG, MR, Autopay with skill to work on Classic tools as well as the Configuration to be carried out in the said applications. Should have good understanding of HCM domain, Payroll and Benefits Gathering requirements from client and vendor. Should be well versed with the BA Tools and Techniques Carrying out Impact and Gap analysis based on requirement received. Creation of contingency plan and providing alternative solution to client and vendor Should be very strong in communication (written and verbal) Should have worked in Agile and Waterfall model Should have good understanding of SDLC phases and work structure Should be able to create Detailed Project Plan using Excel, MPP etc, keep track of it and justify the plan Should be able to Should have SQL, API, JSON, Manual Testing knowledge Should be able to understand the bug tracking flow (STLC) Should be able to perform the Unit Testing, UAT, write test cases, create test matrix Should schedule meeting with client and vendor for walkthrough of Functional Requirement Document (FRD), Business Requirement Document (BRD), SRS and Test Matrix Should be able to understand SFTP/MFT setup and complete the configuration Should be able to triage the issue quickly and provide solutions Help and support new associates Ability to explain domain concept to Development Team Essential Duties and Responsibilities: Implement Interfaces as a liaison between ADP and Client/Vendor system Understand the 5 Project Management process groups of Initiating, Planning, Executing, Monitoring &amp;amp; Controlling and Closing Experience in writing SQL queries with various types of joins for data validation Good experience with MS Excel (V-Lookup, H-Lookup, Pivot) Writing FRD, BRD, SRS etc Writing Test cases and Creating Test 
Matrix, Understanding of RTM Should be able to handle stakeholders (Internal and External) communication in a very clear and precise manner Should be able to create contingency plan Should have good knowledge and understanding about SFTP/FTP/MFT Should be able to jump any quick calls and provide solutions Should have worked in handling data, preparing reports using excel or any other data tool Should be able to train and mentor at least 2 associates on HCM Domain and Process knowledge &lt;div&gt; Desired Skills Excellent analytical and problem-solving skills Excellent team player and interpersonal skills Excellent communication and presentation skills Ability to coordinate with others team members to accomplish objectives. Ability to prioritize the job and meet the deadlines/SLAs Ability to meet deadlines and attention to detail MS Office tools. Should be able to create RCA Should be able to work independently as Project Manager Eligibility Criteria: Education: Bachelors Degree in Engineering/Technology, BCA, BCS, B.Sc. Computer Science or equivalent or B.Com with technical certification Experience: Minimum 3 to 5 years of experience. Strong knowledge on HCM product (HR, Payroll) functionality. Strong knowledge of PL/SQL and RDBMS concepts and Manual Testing Strong communication skills Analytical and Agile mindset &lt;/div&gt; &lt;div&gt; We are a comprehensive global provider of cloud-based human capital management (HCM) solutions that unite HR, payroll, talent, time, tax and benefits administration and a leader in business outsourcing services, analytics, and compliance expertise. We believe our people make all the difference in cultivating a down-to-earth culture that embraces our core values, welcomes ideas, encourages innovation, and values belonging. 
We&apos;ve received recognition for our work by many esteemed organizations, learn more at ADP Awards and Recognition &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Textile &amp;amp; Apparel&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;ADP&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814547/nas-impl-ice-business-analyst-at-adp/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814547/nas-impl-ice-business-analyst-at-adp/</link>
  <title>[Full Time] NAS Impl ICE Business Analyst at ADP</title>
  <dc:date>Mon, 02 Mar 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814321/databricks-developer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt; Roles and Responsibility &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Design and implement data pipelines using Databricks. &lt;/li&gt; &lt;li&gt; Collaborate with cross-functional teams to identify and prioritize project requirements. &lt;/li&gt; &lt;li&gt; Develop and maintain large-scale data architectures and systems. &lt;/li&gt; &lt;li&gt; Troubleshoot and resolve complex technical issues related to Databricks. &lt;/li&gt; &lt;li&gt; Optimize system performance and ensure scalability and reliability. &lt;/li&gt; &lt;li&gt; Provide technical guidance and support to junior team members. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;strong&gt; Job Requirements &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong understanding of Databricks concepts and technologies. &lt;/li&gt; &lt;li&gt; Experience working with big data processing and analytics tools. &lt;/li&gt; &lt;li&gt; Excellent problem-solving skills and attention to detail. &lt;/li&gt; &lt;li&gt; Ability to work collaboratively in a fast-paced environment. &lt;/li&gt; &lt;li&gt; Strong communication and interpersonal skills. &lt;/li&gt; &lt;li&gt; Familiarity with agile development methodologies and version control systems. 
&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814321/databricks-developer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814321/databricks-developer-at-infobeans/</link>
  <title>[Full Time] Databricks Developer at Infobeans</title>
  <dc:date>Fri, 27 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814526/senior-technical-support-engineer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;b&gt;What will your role look like&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Act as a Senior Technical Support Engineer providing expert-level technical assistance to customers via phone and email.&lt;/li&gt;&lt;li&gt;Troubleshoot and resolve complex issues across software applications, APIs, integrations, infrastructure, cloud, and hardware environments.&lt;/li&gt;&lt;li&gt;Reproduce issues, perform root cause analysis, and collaborate closely with Engineering and Product teams on bug fixes and feature enhancements.&lt;/li&gt;&lt;li&gt;Lead or participate in incident management and on-call rotations for critical production issues.&lt;/li&gt;&lt;li&gt;Serve as a customer advocate by triaging issues to internal teams and third-party vendors, ensuring resolution within defined SLOs.&lt;/li&gt;&lt;li&gt;Support resellers by staying current with platform features and acting as a subject matter expert for diagnostics and issue resolution.&lt;/li&gt;&lt;li&gt;Identify recurring support trends and contribute to long-term product and process improvements.&lt;/li&gt;&lt;li&gt;Maintain accurate case records and status updates in ticketing systems.&lt;/li&gt;&lt;li&gt;Create and maintain technical documentation, runbooks, and FAQs.&lt;/li&gt;&lt;li&gt;Work closely with cross-functional teams and vendors to ensure timely issue resolution.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Why you will love this role&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Opportunity to work on complex, real-world technical challenges across modern SaaS, cloud, and API-driven platforms.&lt;/li&gt;&lt;li&gt;High-impact role where your expertise directly influences customer success and product improvement.&lt;/li&gt;&lt;li&gt;Strong collaboration with Engineering, Product, and Customer-facing teams.&lt;/li&gt;&lt;li&gt;Continuous learning environment with exposure to new technologies and evolving 
systems.&lt;/li&gt;&lt;li&gt;A fast-paced, customer-first culture that values ownership, accountability, and technical excellence.&lt;/li&gt;&lt;li&gt;Ability to make a meaningful difference by improving both customer experience and internal processes.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;We would like you to bring along&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;4-7+ years of experience in technical support, solutions engineering, or a similar customer-facing technical role.&lt;/li&gt;&lt;li&gt;Strong hands-on experience with SaaS platforms, APIs, databases, and scripting (Python, Shell).&lt;/li&gt;&lt;li&gt;Experience working with cloud services such as AWS and Azure.&lt;/li&gt;&lt;li&gt;Solid understanding of web technologies, integrations, and system architecture.&lt;/li&gt;&lt;li&gt;Experience with ticketing systems (e.g., Zendesk, Jira) and knowledge base tools.&lt;/li&gt;&lt;li&gt;Strong troubleshooting, analytical, and problem-solving skills with the ability to communicate clearly under pressure.&lt;/li&gt;&lt;li&gt;Excellent verbal and written communication skills with a strong customer-first mindset.&lt;/li&gt;&lt;li&gt;Ability to prioritize effectively, manage customer expectations, and execute efficiently in a fast-paced environment.&lt;/li&gt;&lt;li&gt;Willingness to work flexible hours, including some weekends, to support a 24/7 business.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Good-to-have skills&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience working with web servers and databases such as Apache, IIS, MySQL, MSSQL, and PostgreSQL.&lt;/li&gt;&lt;li&gt;Knowledge of application protocols including DNS, HTTP, HTTPS (SSL), and FTP.&lt;/li&gt;&lt;li&gt;Basic experience working in Linux environments.&lt;/li&gt;&lt;li&gt;Familiarity with API and web service technologies such as REST, JSON, and OAuth.&lt;/li&gt;&lt;li&gt;Prior experience supporting resellers or working with third-party vendors.&lt;/li&gt;&lt;li&gt;Exposure to incident management, on-call 
rotations, or SRE/DevOps practices.&lt;/li&gt;&lt;li&gt;Secondary degree or relevant technical certifications.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814526/senior-technical-support-engineer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814526/senior-technical-support-engineer-at-infobeans/</link>
  <title>[Full Time] Senior Technical Support Engineer at Infobeans</title>
  <dc:date>Fri, 27 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816784/aws-cloud-support-senior-engineer-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt;&lt;li&gt;Must-Have Skills: Expertise in AWS CDK, Services(Lambda, ECS, S3) and PostgreSQL DB management. Strong understanding serverless architecture and event-driven design(SNS, SQS).&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Nice to have: Knowledge of multi-account AWS Setups and Security best practices (IAM, VPC, etc.), Experience in cost optimization strategies in AWS.&lt;/li&gt;&lt;li&gt;AWS Redshift, Aurora, AWS Glue, AWS Lambda, etc.&lt;/li&gt;&lt;li&gt;Extensive experience with Data gathering and ingestion (from multiple sources), and manipulation, orchestration and optimization on AWS Cloud&lt;/li&gt;&lt;li&gt;Experience with SQL&lt;/li&gt;&lt;li&gt;PySpark / Python&lt;/li&gt;&lt;li&gt;Developing RESTful APIs&lt;/li&gt;&lt;li&gt;Skilled in using ETL tools&lt;/li&gt;&lt;li&gt;Proven track record of leading multi-shore teams on sizable Cloud + Data initiatives&lt;/li&gt;&lt;li&gt;Working in an Agile Environment&lt;/li&gt;&lt;li&gt;Excellent communication, presentation and client interaction skills&lt;/li&gt;&lt;li&gt;Ability to work in a very dynamic environment with multiple onshore stakeholders&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;&lt;b&gt;Mandatory Competencies&lt;/b&gt;&lt;/b&gt;&lt;br&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Cloud - AWS - AWS Lambda,AWS EventBridge, AWS Fargate&lt;/li&gt;&lt;li&gt;Cloud - AWS - AWS S3, S3 glacier, AWS EBS&lt;/li&gt;&lt;li&gt;Cloud - AWS - ECS&lt;/li&gt;&lt;li&gt;Database - PostgreSQL - PostgreSQL&lt;/li&gt;&lt;li&gt;Cloud - AWS - AWS SNS, AWS SQS, AWS Kinesis&lt;/li&gt;&lt;li&gt;Cloud - AWS - Tensorflow on AWS, AWS Glue, AWS EMR, Amazon Data Pipeline, AWS Redshift&lt;/li&gt;&lt;li&gt;ETL - ETL - AWS Glue&lt;/li&gt;&lt;li&gt;Big Data - Big Data - Pyspark&lt;/li&gt;&lt;li&gt;Agile - Agile - Extreme Programming&lt;/li&gt;&lt;li&gt;Programming Language - Python - Python Shell&lt;/li&gt;&lt;li&gt;Beh - Communication and 
collaboration&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816784/aws-cloud-support-senior-engineer-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816784/aws-cloud-support-senior-engineer-at-iris-software/</link>
  <title>[Full Time] AWS Cloud Support - Senior Engineer at Iris Software</title>
  <dc:date>Fri, 27 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818985/sr-machine-learning-engineer-search-ai-at-apple/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; As a Senior Machine Learning Engineer, you play a critical role in developing world-class Search and Q&amp;amp;A experiences for Apple customers with cutting-edge search technologies and large language models &lt;/li&gt; &lt;li&gt; Our team is responsible for delivering next-generation Search and Question Answering systems across Apple products including Siri, Safari, Spotlight, and more &lt;/li&gt; &lt;li&gt; This is your chance to shape how people get information by leveraging your Search and applied machine learning expertise along with robust software engineering skills &lt;/li&gt; &lt;li&gt; You will collaborate with outstanding Search and AI engineers on large scale machine learning to improve Query Understanding, Retrieval, and Ranking, developing fundamental building blocks needed for AI powered experiences such as fine-tuning and reinforcement learning &lt;/li&gt; &lt;li&gt; This involves pushing the boundaries on document retrieval and ranking, developing sophisticated machine learning models, using embeddings and deep learning to understand the quality of matches &lt;/li&gt; &lt;li&gt; It also includes online learning to react quickly to change and natural language processing to understand queries &lt;/li&gt; &lt;li&gt; You will work with petabytes of data and combine information from multiple structured and unstructured sources to provide the best results and accurate answers to satisfy users&apos; information-seeking needs &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;strong&gt; Requirements &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; 12+ years experience in shipping Search and Q&amp;amp;A technologies and ML systems &lt;/li&gt; &lt;li&gt; Excellent programming skills in mainstream programming languages such as C++, Python, Scala, and Go &lt;/li&gt; &lt;li&gt; Experience delivering tooling and frameworks to evaluate individual components and end-to-end quality 
&lt;/li&gt; &lt;li&gt; Strong analytical skills to systematically identify opportunities to improve search relevance and answer accuracy &lt;/li&gt; &lt;li&gt; Strong written and verbal communication with the ability to articulate complex topics &lt;/li&gt; &lt;li&gt; Excellent interpersonal skills and teamwork; demonstrated ability to connect and collaborate with others &lt;/li&gt; &lt;li&gt; Passion for building phenomenal products and curiosity to learn, &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Consumer Electronics &amp;amp; Appliances&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Apple&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818985/sr-machine-learning-engineer-search-ai-at-apple/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818985/sr-machine-learning-engineer-search-ai-at-apple/</link>
  <title>[Full Time] Sr Machine Learning Engineer, Search &amp; AI at Apple</title>
  <dc:date>Fri, 27 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814424/sr-power-bi-developer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Must have skills:&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;1. &lt;strong&gt;Power BI&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Visualization Produce compelling and informative visualizations using various native and custom chart types. Create and maintain relationships between visuals, filters, bookmarks, and numeric/field parameters. Design strategic visual interactions that enhance the end-users experience using cross- filtering and cross- highlighting. &lt;/li&gt;&lt;li&gt;DAX Language Write and optimize DAX expressions to create measures and calculated columns. Familiarity with common filtering functions (CALCULATE, FILTER, etc.) and iteration functions (SUMX, AVERAGEX, etc.) &lt;/li&gt;&lt;li&gt;Modeling Create effective data models. Maintain relationship cardinality and cross- filtering between tables. Understand the use cases for Import, DirectQuery, Dual, and Live data storage modes. &lt;/li&gt;&lt;li&gt;Publishing Manage online deployment pipelines to test and publish Power BI reports. Manage user roles and implement row-level security to restrict data access. &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;2. Power Query&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Preparation Clean, transform, reshape, and aggregate data from different sources such as Excel, SQL Server, SharePoint, etc. Create dynamic, reusable queries using parameters. &lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;M Language Knowledge of different data types and data structures like values, records, tables, lists, etc. Familiarity with built-in functions and the ability to write custom functions. Understand native query folding to optimize performance. &lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;3. SQL Experience writing and optimizing queries. 
Strong understanding of relational databases&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Design &amp;amp; develop PowerBI dashboards and reports&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience: &lt;/strong&gt;&lt;span&gt;Min 8 years (Preferrable 10+ years of experience)&lt;/span&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814424/sr-power-bi-developer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814424/sr-power-bi-developer-at-infobeans/</link>
  <title>[Full Time] Sr. Power BI Developer at Infobeans</title>
  <dc:date>Fri, 27 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814983/data-architect-at-syngenta-india/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;strong&gt;Role Summary&lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Syngenta is looking for a proactive and driven Data Architect to join our cloud and Data Ops team. In this role, you will work on designing the system architecture and solution, ensuring the platform is scalable while performant, and creating automated data pipelines.&lt;/p&gt; &lt;p&gt; &lt;strong&gt; &lt;u&gt;Responsibilities:&lt;/u&gt; &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Design and lead implementation of end-to-end Databricks Lakehouse Platforms using Delta Lake, Delta Live Tables, and MLflow.&lt;/li&gt; &lt;li&gt;Architect Medallion Architecture (Bronze/Silver/Gold) for structured, semi-structured, and streaming workloads.&lt;/li&gt; &lt;li&gt;Implement governed Lakehouse patterns using Unity Catalog for access control, lineage, data classification, and secure sharing.&lt;/li&gt; &lt;li&gt;Build scalable ETL/ELT pipelines using Databricks Notebooks, Workflows, SQL Warehouses, and Spark-based transformations.&lt;/li&gt; &lt;li&gt;Develop real-time streaming pipelines with Auto Loader, Structured Streaming, and event-driven platforms (Kafka, Kinesis, Pub/Sub).&lt;/li&gt; &lt;li&gt;Integrate Databricks with cloud-native services such as AWS Glue, Azure Data Factory, and GCP Dataform.&lt;/li&gt; &lt;li&gt;Define distributed integration patterns using REST APIs, microservices, and event-driven architectures.&lt;/li&gt; &lt;li&gt;Enforce data governance, RBAC/ABAC, encryption, secret management, and compliance controls.&lt;/li&gt; &lt;li&gt;Optimize Delta Lake tables, Spark workloads, and cluster configurations using Photon and autoscaling patterns.&lt;/li&gt; &lt;li&gt;Drive cloud cost optimization across storage, compute, and workflow orchestration.&lt;/li&gt; &lt;li&gt;Participate in architecture reviews, set standards, and support engineering teams throughout execution.&lt;/li&gt; &lt;li&gt;Stay current on Databricks capabilities 
including Unity Catalog updates, Lakehouse Federation, serverless compute, and AI/ML features.&lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt;Bachelor s or master s degree in computer science, Data Engineering, or related field.&lt;/li&gt; &lt;li&gt;10+ years of experience in enterprise software, cloud architecture, or data engineering roles.&lt;/li&gt; &lt;li&gt;Strong hands-on experience with Databricks, Apache Spark, Delta Lake, and Lakehouse platform design.&lt;/li&gt; &lt;li&gt;Experience implementing and administering Unity Catalog for governance, lineage, and fine-grained access control.&lt;/li&gt; &lt;li&gt;Experience designing Medallion Architecture for analytics and engineering workloads.&lt;/li&gt; &lt;li&gt;Hands-on experience with cloud platforms such as AWS, Azure, or GCP, including storage, compute, and networking services.&lt;/li&gt; &lt;li&gt;Experience with streaming technologies such as Kafka, Kinesis, or Pub/Sub.&lt;/li&gt; &lt;li&gt;Strong understanding of data modeling, workflow orchestration (Airflow, Databricks Workflows, dbt), and pipeline automation.&lt;/li&gt; &lt;li&gt;Familiarity with Scala-based Spark workloads in addition to PySpark and SQL pipelines.&lt;/li&gt; &lt;li&gt;Skilled in performance tuning, Spark optimization, cluster policies, and cloud cost management.&lt;/li&gt; &lt;li&gt;Excellent communication skills for technical leadership and stakeholder collaboration.&lt;/li&gt; &lt;li&gt;Certifications in Databricks, AWS Solution Architecture, or TOGAF are a plus.&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Agriculture / Forestry / Fishing&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact 
Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Syngenta&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814983/data-architect-at-syngenta-india/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814983/data-architect-at-syngenta-india/</link>
  <title>[Full Time] Data Architect at Syngenta India</title>
  <dc:date>Thu, 26 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813539/data-architect-at-ntt-data/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;b&gt;Make an impact with NTT DATA&lt;/b&gt;&lt;br&gt;Join a company that is pushing the boundaries of what is possible. We are renowned for our technical excellence and leading innovations, and for making a difference to our clients and society. Our workplace embraces diversity and inclusion  its a place where you can grow, belong and thrive.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;div&gt; &lt;b&gt;Your day at NTT DATA&lt;/b&gt;&lt;br&gt;The Data Governance Specialist is a seasoned subject matter expert, responsible for participating in designing and developing the company&apos;s data infrastructure.This role will be required to leverage skills and expertise to introduce new ideas and improve on existing solutions in line with established frameworks and governance.Additionally, this role plays an important part in shaping and performing data governance. This role develops and implements data governance frameworks, policies, and procedures to establish data governance best practices across the organization.&lt;br&gt;&lt;/div&gt;&lt;br&gt;&lt;div&gt; &lt;b&gt;Key responsibilities:&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Collaborates with various stakeholders to analyze data requirements, develop data models, and ensure the integrity and security of our data infrastructure.&lt;/li&gt; &lt;li&gt;Accountable for defining how the data will be stored, consumed, integrated and managed by different data entities and IT systems, as well as any applications using or processing that data in some way.&lt;/li&gt; &lt;li&gt;Supports the creation of a data governance framework to meet business and technology requirements while ensuring data security and compliance with regulations, and ensure it is integrated in relevant processes across the organization.&lt;/li&gt; &lt;li&gt;Works across multiple teams for the optimization of data storage and retrieval, enhance data quality and governance, and guides the 
organization in making informed data-driven decisions.&lt;/li&gt; &lt;li&gt;Accountable for translating business requirements into databases, data warehouses, and data streams, collaborates with other teams and business stakeholders to understand data requirements and designs appropriate data models and structures.&lt;/li&gt; &lt;li&gt;Designs and implements data integration and data flow processes to ensure efficient data movement across different systems and platforms.&lt;/li&gt; &lt;li&gt;Analyzes, plans, and defines data architecture framework, including security, reference data, metadata, and master data.&lt;/li&gt; &lt;li&gt;Accountable for defining and implementing data governance policies and procedures to ensure data accuracy, consistency, and security.&lt;/li&gt; &lt;li&gt;Works across multiple teams for the planning and facilitation of data governance communities, workshops, and meetings.&lt;/li&gt; &lt;li&gt;Analyzes existing data systems and identifies opportunities for improvement, recommending innovative solutions to enhance data management and analytics capabilities.&lt;/li&gt; &lt;li&gt;Accountable for defining and maintaining data standards, data definitions, and data dictionaries to ensure consistency and interoperability.&lt;/li&gt; &lt;li&gt;Works across multiple teams for the definition of technical standards and guidelines that pertain to data and information use, security, access and governance (including defining accountabilities in support of data quality mandates).&lt;/li&gt; &lt;li&gt;Creates diagrams showing key data entities and creates an inventory of the data needed to implement the architecture vision.&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;br&gt;&lt;div&gt; &lt;b&gt;To thrive in this role, you need to have:&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Seasoned understanding of data governance, data integration, data quality, data lifecycle processes and data management best practices, and how these impacts business performance.&lt;/li&gt; 
&lt;li&gt;Enterprise-wide view of the business and strong appreciation for strategy, processes and capabilities, enabling technologies, and governance.&lt;/li&gt; &lt;li&gt;Seasoned in preparing, facilitating, and driving forums, communities, workshops, and meetings with people across various levels of the organization.&lt;/li&gt; &lt;li&gt;A capable change agent and communicator knowing how to demonstrate the value of data governance, create organizational buy-in and get the organization to understand data governance.&lt;/li&gt; &lt;li&gt;Ability to collaborate and communicate effectively with team members, contributing to their success.&lt;/li&gt; &lt;li&gt;Ability to build relationships, manage business partners and navigate with ease at all levels of the organization from senior management to specialists.&lt;/li&gt; &lt;li&gt;Seasoned in developing communication and training materials and training people in Data Governance.&lt;/li&gt; &lt;li&gt;Good understanding of IT domains of information, security, application, technology infrastructure and integration&lt;/li&gt; &lt;li&gt;Problem-solving and analytical capabilities to safeguard data integrity, security, and organization&lt;/li&gt; &lt;li&gt;Good understanding of data security and privacy regulations (e.g., GDPR, CCPA) and experience implementing data protection measures.&lt;/li&gt; &lt;li&gt;Good understanding of data modelling techniques, data warehousing concepts, and database management systems.&lt;/li&gt; &lt;li&gt;Seasoned in SQL and experience with database development and performance optimization.&lt;/li&gt; &lt;li&gt;Seasoned in data visualization tools and techniques&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;br&gt;&lt;div&gt; &lt;b&gt;Academic qualifications and certifications:&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Bachelor&apos;s degree or equivalent in either Information Systems or Computer Science or a related field.&lt;/li&gt; &lt;li&gt;Certifications in data management or related areas (e.g., DAMA 
Certified Data Management Professional) preferred.&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;br&gt;&lt;div&gt; &lt;b&gt;Required experience:&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Seasoned experience working as a Data Governance Specialist or in a similar role.&lt;/li&gt; &lt;li&gt;Seasoned experience in developing and implementing data governance strategies, frameworks, and operating models.&lt;/li&gt; &lt;li&gt;Seasoned experience in developing training materials, conducting data governance workshops, and facilitating knowledge sharing sessions.&lt;/li&gt; &lt;li&gt;Seasoned experience with data management tools and technologies.&lt;/li&gt; &lt;li&gt;Seasoned experience in developing data governance frameworks, data dictionaries, data classification schemes, and data standards is desirable.&lt;/li&gt; &lt;li&gt;Seasoned experience in ensuring data governance practices align with regulatory guidelines and managing compliance audits.&lt;/li&gt; &lt;/ul&gt;&lt;/div&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Workplace type&lt;/span&gt;: &lt;/b&gt;&lt;/p&gt;Remote Working&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;About NTT DATA&lt;/b&gt;&lt;br&gt;NTT DATA is a $30+ billion business and technology services leader, serving 75% of the Fortune Global 100. We are committed to accelerating client success and positively impacting society through responsible innovation. We are one of the worlds leading AI and digital infrastructure providers, with unmatched capabilities in enterprise-scale AI, cloud, security, connectivity, data centers and application services.&amp;nbsp; Our consulting and industry solutions help organizations and society move confidently and sustainably into the digital future. As a Global Top Employer, we have experts in more than 50 countries. We also offer clients access to a robust ecosystem of innovation centers as well as established and start-up partners. 
NTT DATA is part of NTT Group, which invests over $3 billion each year in R&amp;amp;D.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;Equal Opportunity Employer&lt;/b&gt;&lt;br&gt;NTT DATA is proud to be an Equal Opportunity Employer with a global culture that embraces diversity. We are committed to providing an environment free of unfair discrimination and harassment. We do not discriminate based on age, race, colour, gender, sexual orientation, religion, nationality, disability, pregnancy, marital status, veteran status, or any other protected category. Join our growing global team and accelerate your career with us. Apply today.&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;Third parties fraudulently posing as NTT DATA recruiters&lt;/b&gt;&lt;/span&gt;&lt;span&gt;&amp;nbsp;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;NTT DATA recruiters will never ask job seekers&lt;span&gt; or&lt;/span&gt; candidates for payment or banking information during the recruitment process, for any reason. Please remain vigilant of third parties &lt;span&gt;who may attempt to impersonate &lt;/span&gt;NTT DATA recruiters—whether in writing or by phone—in order to deceptively obtain personal data or money from you. All email communications from an NTT DATA recruiter &lt;span&gt;will come from&lt;/span&gt; an &lt;b&gt;@nttdata.com&lt;/b&gt; email address. 
If you suspect any fraudulent activity, please &lt;/span&gt;&lt;i&gt;contact us&lt;/i&gt;.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;NTT DATA&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813539/data-architect-at-ntt-data/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813539/data-architect-at-ntt-data/</link>
  <title>[Full Time] Data Architect at NTT DATA</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813910/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Data Architecture Principles&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;18&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, facilitating seamless data flow and accessibility across the organization. You will also engage in discussions to refine data strategies and provide insights that enhance the overall data management framework, ensuring that the architecture supports current and future needs effectively. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be a Subject Matter Expert with deep knowledge and experience.&lt;/li&gt;&lt;li&gt;Should have influencing and advisory skills.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and responsible for team decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams, and provide solutions to business area problems.&lt;/li&gt;&lt;li&gt;Collaborate with stakeholders to gather and analyze data requirements, ensuring alignment with business goals.&lt;/li&gt;&lt;li&gt;Develop and maintain comprehensive documentation of data architecture, including data models, standards, and best practices. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Data Architecture Principles.&lt;/li&gt;&lt;li&gt;Experience with data modeling tools and techniques.&lt;/li&gt;&lt;li&gt;Strong understanding of database management systems and data storage solutions.&lt;/li&gt;&lt;li&gt;Knowledge of data integration methods and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data architecture and services. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 18 years of experience in Data Architecture Principles.&lt;/li&gt;&lt;li&gt;This position is based at our Pune office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813910/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813910/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813586/data-engineer-a%c2%80%c2%93-microsoft-sql-server-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Required Skills: &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong expertise in SQL Server, data extraction, and performance optimization. &lt;/li&gt; &lt;li&gt; Hands on experience with MinIO or similar S3 compatible object stores. &lt;/li&gt; &lt;li&gt; Proficiency in handling Parquet and high compression data workflows. &lt;/li&gt; &lt;li&gt; Experience with large scale data movement (multi TB to PB). &lt;/li&gt; &lt;li&gt; Strong understanding of data retention, archival strategies, and DLM best practices. &lt;/li&gt; &lt;li&gt; Ability to design resilient, fault tolerant, and fully auditable pipelines. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Preferred &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Experience in gaming, gambling, or high velocity transactional industries. &lt;/li&gt; &lt;li&gt; Knowledge of scripting/ETL frameworks (Python, Airflow, Spark, etc.). &lt;/li&gt; &lt;li&gt; Understanding of compliance and audit requirements for long term customer data storage &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Required Skills: &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong expertise in SQL Server, data extraction, and performance optimization. &lt;/li&gt; &lt;li&gt; Hands on experience with MinIO or similar S3 compatible object stores. &lt;/li&gt; &lt;li&gt; Proficiency in handling Parquet and high compression data workflows. &lt;/li&gt; &lt;li&gt; Experience with large scale data movement (multi TB to PB). &lt;/li&gt; &lt;li&gt; Strong understanding of data retention, archival strategies, and DLM best practices. 
&lt;/li&gt; &lt;li&gt; Ability to design resilient, fault tolerant, and fully auditable pipelines. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Preferred &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Experience in gaming, gambling, or high velocity transactional industries. &lt;/li&gt; &lt;li&gt; Knowledge of scripting/ETL frameworks (Python, Airflow, Spark, etc.). &lt;/li&gt; &lt;li&gt; Understanding of compliance and audit requirements for long term customer data storage &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Key Responsibilities : &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Design and implement an on prem data lake on MinIO for multi petabyte storage. &lt;br&gt; Build high efficiency pipelines to extract data from SQL Server and convert to Parquet (high compression, columnar format). &lt;/li&gt; &lt;li&gt; Implement Data Lifecycle Management (DLM) for 7 year retention, tiering, and archival policies. &lt;/li&gt; &lt;li&gt; Guarantee ultra high accuracy with strict zero tolerance for data loss across all customer datasets. &lt;/li&gt; &lt;li&gt; Optimize storage of mixed type datasets, including: &lt;/li&gt; &lt;li&gt; Transactional data (e.g., gaming ledger entries) &lt;/li&gt; &lt;li&gt; BLOB/large object data (e.g., game artifacts) &lt;/li&gt; &lt;li&gt; Monitor storage performance and ensure scalable, cost optimal on prem resource utilization. &lt;/li&gt; &lt;li&gt; Implement validation, reconciliation, and audit frameworks for end to end integrity. 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813586/data-engineer-a%c2%80%c2%93-microsoft-sql-server-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813586/data-engineer-a%c2%80%c2%93-microsoft-sql-server-at-zensar/</link>
  <title>[Full Time] Data Engineer – Microsoft SQL Server at Zensar</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813755/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;AI &amp;amp; Data Solution Architecture&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly across systems, while also addressing any challenges that arise in the data architecture process. Your role will be pivotal in shaping the data landscape of the organization, enabling effective data management and utilization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Data &amp;amp; AI Solution Architecture.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with cloud-based data solutions and architectures.&lt;/li&gt;&lt;li&gt;Familiarity with data integration tools and methodologies.&lt;/li&gt;&lt;li&gt;Ability to design scalable and efficient data storage solutions. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Data &amp;amp; AI Solution Architecture.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813755/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813755/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815765/manager-database-developer-clinical-database-management-at-pfizer/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;ROLE SUMMARY&lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;As part of the Clinical Data Sciences (CDS) group, an integral delivery unit within the Clinical Development &amp;amp; Operations (CD&amp;amp;O) organization, the Manager, Project Manager, Clinical Database Management is responsible for the provision of project management leadership and expertise in Clinical Data Sciences with an emphasis on clinical databases and related technologies supporting assigned segments of the Pfizer portfolio. Accountabilities include project management and leadership of the design, development, and maintenance of clinical databases and other technical deliverables within Clinical Data Sciences. Oversees the development cycle/change control of database build to ensure the integrity of clinical data and the application of Pfizer standards supporting consistency in asset/submission data. Serves as a Subject Matter Expert for database build in the assigned Therapy Area. The Project Manager, Clinical Database Management works closely with the Clinical Data Scientist to ensure consistent, timely and high quality application of process and delivery of CDS responsibilities. The Project Manager, Clinical Database Management will be accountable to assure process, technologies, and standards are leveraged in a consistent way across assigned projects. May mentor junior staff members &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;b&gt;ROLE RESPONSIBILITIES &lt;/b&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Works closely with department roles, assigned mentor, and cross-functional study team members to manage the development cycle of data capture solutions that support the quality and timely delivery of data required per standard and study specific data review plans. 
&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Solves moderately complex problems related to database build and implementation &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;May aid with solving problems outside of own department &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Oversees operational activities/projects of professional Work Teams to support short-term Department goals. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Leads internal initiatives, working with other roles within CDS to improve processes. Represents Database Management on CDS initiatives. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Complies with applicable SOPs and work practices &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Serve as a project management resource to the study teams for developing and implementing database development project plans. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ensure proper planning of study activities in DBM and proactively alert risk and plan mitigation. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Independently, perform Impact analysis for proposed solutions to existing tools and processes and convey the same to technical and non-technical stakeholders. 
&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Act as an Expert in the area, applying best practices according to documented processes &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Participate in Pfizer Standards meetings as appropriate &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ensure compliance always &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ensure seamless functioning and collaboration of CDS activities between the DBM and Clinical Data Scientist functions. &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Ensure operational excellence in collaboration with partners and colleagues for application of standards within data capture methods in support of the data review plan, in collaboration with the Clinical Data Scientist, ensure operational excellence across all CDS deliverables &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Assess impacts of Change Control and develop mitigation plans for emerging risks and issues. 
&lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Identifies existing process/product improvements &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Develops innovative, advanced new concepts that improve processes / products across own and related disciplines &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt;Takes appropriate risks to achieve desired result &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;BASIC QUALIFICATIONS &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Bachelors degree in Life Sciences, Computer Science, or equivalent &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Minimum of 10 years of relevant project management experience in a pharmaceutical, biotech, CRO, or Regulatory Agency with an emphasis on building data collection and assimilation solutions &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Strong project management, communication (written and oral), decision-making, influencing, and negotiation skills &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Familiarity with Electronic Data Capture systems, Clinical Data Management Systems/relational databases (e.g. Oracle InForm and Data Management Workbench, MS SQL Server or MS Access) and data visualization tools (e.g. 
Spotfire, J-Review) &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Experience with MSProject or other enterprise project management tools &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;PREFERRED QUALIFICATIONS&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Understanding of drug development process and data operations required for the reporting of clinical trial data (e.g. data review, study reports, regulatory submissions, safety updates, etc.) &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Understanding of regulatory requirements and relevant data standards; CDISC knowledge and experience are preferable &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Proficiency in the use of Microsoft Office Suite of tools (Outlook, Word, Excel, etc.) &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;PHYSICAL/MENTAL REQUIREMENTS&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Primarily an office-based position involving sitting in front of a computer for large periods of work time, making presentations, etc. 
&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;br&gt;Work Location Assignment: Hybrid&lt;br&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Pfizer is an equal opportunity employer and complies with all applicable equal employment opportunity legislation in each jurisdiction in which it operates.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Medical &lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pfizer&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815765/manager-database-developer-clinical-database-management-at-pfizer/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815765/manager-database-developer-clinical-database-management-at-pfizer/</link>
  <title>[Full Time] Manager, Database Developer, Clinical Database Management at Pfizer</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813682/oracle-empirica-implementation-consultant-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; &lt;span&gt; Seeking an experienced &lt;strong&gt; Oracle Empirica Implementation Consultants &lt;/strong&gt; to support the implementation, customization, and optimization of Oracle Empirica a leading pharmacovigilance and signal detection solution. This role is critical to ensuring the successful deployment of Oracle Empirica within customer s operational framework, enabling enhanced safety monitoring, compliance, and decision-making capabilities. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; The ideal candidate will bring deep expertise in Oracle Empirica, hands-on implementation experience, and a strong understanding of pharmacovigilance processes and regulatory requirements. It involves collaboration with cross-functional teams, including IT, and business stakeholders. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Key Responsibilities &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;strong&gt; Implementation and Configuration &lt;/strong&gt; : Take a leading role in the end-to-end implementation of Oracle Empirica, including system setup, configuration, and integration with existing Cencora systems. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Requirements Gathering &lt;/strong&gt; : Collaborate with stakeholders to understand business needs, define technical requirements, and translate them into actionable implementation plans. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Customization &lt;/strong&gt; : Tailor Oracle Empirica functionalities to align with customer pharmacovigilance workflows, regulatory compliance standards, and business objectives. 
&lt;/li&gt; &lt;li&gt; &lt;strong&gt; Data Integration &lt;/strong&gt; : Ensure seamless integration of Oracle Empirica with internal databases, data warehouses, and third-party systems for efficient signal detection and reporting. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Testing and Validation &lt;/strong&gt; : Participate in rigorous system testing, user acceptance testing (UAT), and validation to ensure the solution meets predefined requirements and performs optimally. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Training and Support &lt;/strong&gt; : Provide training to end-users, develop documentation, and offer ongoing technical support to ensure successful adoption and usage of Oracle Empirica. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Compliance and Best Practices &lt;/strong&gt; : Ensure the implementation aligns with global pharmacovigilance regulations (e.g., FDA, EMA, ICH guidelines) and industry best practices. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Project Management &lt;/strong&gt; : contribute to the management of project timelines, deliverables, and risks, ensuring milestones are met and stakeholders are informed of progress. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Troubleshooting &lt;/strong&gt; : Identify and resolve technical issues during implementation and post-deployment phases. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Qualifications &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Required Skills and Experience &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Proven experience in implementing Oracle Empirica, including signal detection, data mining, and adverse event reporting modules. &lt;/li&gt; &lt;li&gt; Strong understanding of pharmacovigilance processes, regulatory requirements, and compliance standards (e.g., FDA, EMA, ICH). 
&lt;/li&gt; &lt;li&gt; Expertise in system integration, including database management (e.g., Oracle, SQL), ETL processes, and API integrations. &lt;/li&gt; &lt;li&gt; Hands-on experience with system testing, validation, and user acceptance testing (UAT). &lt;/li&gt; &lt;li&gt; Proficiency in managing large-scale implementation projects with cross-functional teams. &lt;/li&gt; &lt;li&gt; Excellent problem-solving skills and ability to troubleshoot technical issues. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Preferred Skills &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Familiarity with other Oracle Health Sciences solutions, such as Argus Safety. &lt;/li&gt; &lt;li&gt; Knowledge of cloud-based deployment models and Oracle Cloud infrastructure. &lt;/li&gt; &lt;li&gt; Experience in the pharmaceutical or healthcare industry is highly desirable. &lt;/li&gt; &lt;li&gt; Strong communication skills to liaise effectively with technical and non-technical stakeholders. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Education and Certifications &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Bachelor s degree in Computer Science, Information Systems, Life Sciences, or a related field (Master s degree preferred). &lt;/li&gt; &lt;li&gt; Oracle certifications related to Empirica or Oracle Health Sciences Suite are a plus. &lt;/li&gt; &lt;li&gt; Project management certifications (e.g., PMP, Agile) are an advantage. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Qualifications &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Required Skills and Experience &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Proven experience in implementing Oracle Empirica, including signal detection, data mining, and adverse event reporting modules. 
&lt;/li&gt; &lt;li&gt; Strong understanding of pharmacovigilance processes, regulatory requirements, and compliance standards (e.g., FDA, EMA, ICH). &lt;/li&gt; &lt;li&gt; Expertise in system integration, including database management (e.g., Oracle, SQL), ETL processes, and API integrations. &lt;/li&gt; &lt;li&gt; Hands-on experience with system testing, validation, and user acceptance testing (UAT). &lt;/li&gt; &lt;li&gt; Proficiency in managing large-scale implementation projects with cross-functional teams. &lt;/li&gt; &lt;li&gt; Excellent problem-solving skills and ability to troubleshoot technical issues. &lt;br&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Education and Certifications &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; Bachelor s degree in Computer Science, Information Systems, Life Sciences, or a related field (Master s degree preferred). &lt;/li&gt; &lt;li&gt; Oracle certifications related to Empirica or Oracle Health Sciences Suite are a plus. &lt;/li&gt; &lt;li&gt; Project management certifications (e.g., PMP, Agile) are an advantage. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Key Responsibilities &lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;strong&gt; Implementation and Configuration &lt;/strong&gt; : Take a leading role in the end-to-end implementation of Oracle Empirica, including system setup, configuration, and integration with existing Cencora systems. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Requirements Gathering &lt;/strong&gt; : Collaborate with stakeholders to understand business needs, define technical requirements, and translate them into actionable implementation plans. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Customization &lt;/strong&gt; : Tailor Oracle Empirica functionalities to align with customer pharmacovigilance workflows, regulatory compliance standards, and business objectives. 
&lt;/li&gt; &lt;li&gt; &lt;strong&gt; Data Integration &lt;/strong&gt; : Ensure seamless integration of Oracle Empirica with internal databases, data warehouses, and third-party systems for efficient signal detection and reporting. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Testing and Validation &lt;/strong&gt; : Participate in rigorous system testing, user acceptance testing (UAT), and validation to ensure the solution meets predefined requirements and performs optimally. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Training and Support &lt;/strong&gt; : Provide training to end-users, develop documentation, and offer ongoing technical support to ensure successful adoption and usage of Oracle Empirica. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Compliance and Best Practices &lt;/strong&gt; : Ensure the implementation aligns with global pharmacovigilance regulations (e.g., FDA, EMA, ICH guidelines) and industry best practices. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Project Management &lt;/strong&gt; : contribute to the management of project timelines, deliverables, and risks, ensuring milestones are met and stakeholders are informed of progress. &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Troubleshooting &lt;/strong&gt; : Identify and resolve technical issues during implementation and post-deployment phases. 
&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813682/oracle-empirica-implementation-consultant-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813682/oracle-empirica-implementation-consultant-at-zensar/</link>
  <title>[Full Time] Oracle Empirica Implementation Consultant at Zensar</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813737/jira-admin-support-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Key Responsibilities:&lt;/p&gt; &lt;p&gt;System Administration: Configure Jira schemes (projects, workflows, fields, screens, permissions) and perform routine maintenance.&lt;/p&gt; &lt;p&gt;Customization &amp;amp; Automation: Develop scripts (e.g., Groovy) for automating tasks and enhancing functionality.&lt;/p&gt; &lt;p&gt;Troubleshooting &amp;amp; Support: Monitor system performance, resolve user issues, and manage integrations with tools like Confluence or Bitbucket.&lt;/p&gt; &lt;p&gt;Training &amp;amp; Best Practices: Provide support, conduct workshops, and document procedures for users.&lt;/p&gt; &lt;p&gt;Upgrades &amp;amp; Security: Patch systems, perform migrations, and ens&lt;span&gt;ure data security.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Required Qualifications &amp;amp; Skills:&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Experience: 3-5 years of experience as a Jira Administrator, ideally managing large-scale, enterprise instances.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Technical Skills: Deep understanding of JIRA Configuration, Workflows, ScriptRunner/JSO Automation, and JQL.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Methodologies: Strong knowledge of Agile (Scrum/Kanban) processes.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;Soft Skills: Strong communication, analytical, and problem-solving abilities&lt;/span&gt; &lt;/p&gt; &lt;p&gt;Key Responsibilities:&lt;/p&gt; &lt;p&gt;System Administration: Configure Jira schemes (projects, workflows, fields, screens, permissions) and perform routine maintenance.&lt;/p&gt; &lt;p&gt;Customization &amp;amp; Automation: Develop scripts (e.g., Groovy) for automating tasks and enhancing functionality.&lt;/p&gt; &lt;p&gt;Troubleshooting &amp;amp; Support: Monitor system performance, resolve user issues, and manage integrations with tools like Confluence or Bitbucket.&lt;/p&gt; &lt;p&gt;Training &amp;amp; Best Practices: Provide support, 
conduct workshops, and document procedures for users.&lt;/p&gt; &lt;p&gt;Upgrades &amp;amp; Security: Patch systems, perform migrations, and &lt;span&gt;ensure data security.&lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813737/jira-admin-support-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813737/jira-admin-support-at-zensar/</link>
  <title>[Full Time] Jira Admin Support at Zensar</title>
  <dc:date>Wed, 25 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814339/data-architect-at-leading-client/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Location: Remote-&lt;span&gt;Delhi / NCR,Bangalore/Bengaluru,Hyderabad/Secunderabad,Chennai,Pune,Kolkata,Ahmedabad,Mumbai&lt;/span&gt;&lt;/p&gt;Notice Period: Immediate. iSource Services is hiring for one of their clients for the position of Data Architect. About the Role - &lt;br&gt;Experience in architecting with AWS or Azure Cloud Data Platform&lt;br&gt;Successfully implemented large scale data warehouse data lake solutions in snowflake or AWS Redshift&lt;br&gt;Be proficient in Data modelling and data architecture design experienced in reviewing 3rd Normal Form and Dimensional models.&lt;br&gt;Implementing Master data management, process design and implementation&lt;br&gt;Implementing Data quality solutions including processes&lt;br&gt;IOT Design using AWS or Azure Cloud platforms&lt;br&gt;Designing and implementing machine learning solutions as part of high-volume data ingestion and transformation&lt;br&gt;Working with structured and unstructured data including geo-spatial data&lt;br&gt;Experience in technologies like python, SQL, no SQL, KAFKA, Elastic Search&lt;br&gt;Experience using snowflake, informatica, azure logic apps, azure functions, azure storage, azure data lake and azure search.&lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Leading Client&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814339/data-architect-at-leading-client/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814339/data-architect-at-leading-client/</link>
  <title>[Full Time] Data Architect at Leading Client</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814899/data-modeler-architect-at-barclays/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Join us as a &lt;b&gt;Data Modeller &lt;/b&gt;in Barclays, responsible for supporting the successful delivery of Location Strategy projects to plan, budget, agreed quality and governance standards. Youll spearhead the evolution of our digital landscape, driving innovation and excellence. You will harness cutting-edge technology to revolutionise our digital offerings, ensuring unparalleled customer experiences.&lt;p&gt;&lt;/p&gt; &lt;p&gt;To be successful as a &lt;b&gt;Data Modeller &lt;/b&gt;you should have experience with: -&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Data Modelling Techniques&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Conceptual, Logical, and Physical data modelling&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Normalization / Denormalization&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Dimensional modelling (Star/Snowflake schema)&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Entity-Relationship (ER) modelling&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Data Modelling Tools&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;ER/Studio&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Erwin Data Modeler&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Database Systems&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Relational: Oracle, SQL Server, PostgreSQL, MySQL&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Columnar: Amazon Redshift, Snowflake&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;NoSQL: MongoDB, Cassandra (for some use cases)&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;SQL and Scripting&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Advanced SQL (DDL, DML, performance tuning)&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;PL/SQL or T-SQL&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Python or Shell scripting for data validation and automation&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Data Warehousing &amp;amp; ETL&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Data 
warehousing concepts (Inmon, Kimball)&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;ETL tools: Informatica etc&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Cloud Platforms (increasingly common)&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;AWS (especially S3, Redshift, Glue)&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Lake formation / Data Lake implementation &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Investment Banking Data Concepts&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Domains such as Deal, CRM, Instruments (equities, bonds) etc&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Analytical and transactional data&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Reference data (e.g., security master, client reference, legal entities)&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Market Data Vendors&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Understanding of identifiers: ISIN, CUSIP, SEDOL, RIC&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Familiarity with Bloomberg, FactSet, Refinitiv, Dealogic, etc.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Data Governance &amp;amp; Lineage&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Metadata management&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Business glossary&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience with tooling- DBT, Snowflake, Databricks, Immuta, Alation, Informatica, colibra etc&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Requirements Gathering&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Working with business analysts, product managers, and stakeholders&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Translating business requirements into data models&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Data Quality &amp;amp; Validation&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Profiling data&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Designing controls for accuracy, completeness, and 
consistency&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Performance Optimization&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Indexing strategy&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Partitioning, clustering&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Query tuning for large datasets&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Some other highly valued skills may include: -&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Stakeholder Communication&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Explaining complex data structures to non-technical stakeholders&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Cross-functional collaboration with engineering, business, and compliance teams&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Documentation&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Creating and maintaining model dictionaries, lineage diagrams, etc.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Agile/Project Management&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Working in Agile/Scrum or Kanban teams&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Using tools like JIRA or Confluence&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;You may be assessed on the key critical skills relevant for success in role, such as risk and controls, change and transformation, business acumen strategic thinking and digital and technology, as well as job-specific technical skills.&lt;/p&gt; &lt;p&gt;This role is based in Pune.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Purpose of the role&lt;/b&gt; &lt;/p&gt; &lt;p&gt;To design, implement, and maintain conceptual, Logical and Physical data models that meet business data/process and technology requirements, by using designs and data strategies across a wide selection of platforms. 
&lt;/p&gt; &lt;p&gt; &lt;b&gt;Accountabilities&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;Analysis and documentation of business requirements to translate them into data models aligned with organisational goals.&lt;/li&gt; &lt;li&gt;Development and maintenance of data dictionaries and glossaries to define data elements and their usage.&lt;/li&gt; &lt;li&gt;Analysis and monitoring of data usage patterns to identify opportunities for data optimisation and improvement, in partnership with the Data Base Administrator.&lt;/li&gt; &lt;li&gt;Strategic architecture definition and product selection.&lt;/li&gt; &lt;li&gt;Production of logical designs in relevant subject area (technical, data, operational), showing for example: processes, objects, data flows, inputs, stored data and outputs. Identifying common components.&lt;/li&gt; &lt;li&gt;Implementation of architectures and Identification, ownership and resolution of design related issues.&lt;/li&gt; &lt;li&gt;Definition and documentation of data architectures standards, principles and strategies.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Assistant Vice President Expectations&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;To advise and influence decision making, contribute to policy development and take responsibility for operational effectiveness. Collaborate closely with other functions/ business divisions.&lt;/li&gt; &lt;li&gt;Lead a team performing complex tasks, using well developed professional knowledge and skills to deliver on work that impacts the whole business function. Set objectives and coach employees in pursuit of those objectives, appraisal of performance relative to objectives and determination of reward outcomes&lt;/li&gt; &lt;li&gt;If the position has leadership responsibilities, People Leaders are expected to demonstrate a clear set of leadership behaviours to create an environment for colleagues to thrive and deliver to a consistently excellent standard. 
The four LEAD behaviours are: L Listen and be authentic, E Energise and inspire, A Align across the enterprise, D Develop others.&lt;/li&gt; &lt;li&gt;OR for an individual contributor, they will lead collaborative assignments and guide team members through structured assignments, identify the need for the inclusion of other areas of specialisation to complete assignments. They will identify new directions for assignments and/ or projects, identifying a combination of cross functional methodologies or practices to meet required outcomes.&lt;/li&gt; &lt;li&gt;Consult on complex issues; providing advice to People Leaders to support the resolution of escalated issues.&lt;/li&gt; &lt;li&gt;Identify ways to mitigate risk and developing new policies/procedures in support of the control and governance agenda.&lt;/li&gt; &lt;li&gt;Take ownership for managing risk and strengthening controls in relation to the work done.&lt;/li&gt; &lt;li&gt;Perform work that is closely related to that of other areas, which requires understanding of how areas coordinate and contribute to the achievement of the objectives of the organisation sub-function.&lt;/li&gt; &lt;li&gt;Collaborate with other areas of work, for business aligned support areas to keep up to speed with business activity and the business strategy.&lt;/li&gt; &lt;li&gt;Engage in complex analysis of data from multiple sources of information, internal and external sources such as procedures and practises (in other areas, teams, companies, etc).to solve problems creatively and effectively.&lt;/li&gt; &lt;li&gt;Communicate complex information. 
Complex information could include sensitive information or information that is difficult to communicate because of its content or its audience.&lt;/li&gt; &lt;li&gt;Influence or convince stakeholders to achieve outcomes.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;All colleagues will be expected to demonstrate the Barclays Values of Respect, Integrity, Service, Excellence and Stewardship our moral compass, helping us do what we believe is right. They will also be expected to demonstrate the Barclays Mindset to Empower, Challenge and Drive the operating manual for how we behave.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Barclays&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814899/data-modeler-architect-at-barclays/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814899/data-modeler-architect-at-barclays/</link>
  <title>[Full Time] Data Modeler / Architect at Barclays</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814029/lead-data-engineer-avp-at-hdfc-bank/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Job Role&lt;/strong&gt; - Lead Azure Data Engineer (Asst. VP)&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location&lt;/strong&gt; - Navi Mumbai / Bangalore / Noida &amp;amp; Gurugram&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience&lt;/strong&gt; - 11 Years to 14 Years&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Job Responsibilities: &lt;/strong&gt;&lt;br&gt;&lt;/p&gt;&lt;ol type=&quot;1&quot;&gt;&lt;li&gt;Fundamentals of DevOps, DevSecOps, CD / CI Pipeline using ADO&lt;/li&gt;&lt;li&gt;Good understanding of MPP Architecture, MySQL, RDS, MS&lt;/li&gt;&lt;li&gt;SQL DB, Oracle, Postgres DB&lt;/li&gt;&lt;li&gt;Would need to interact with Software Integrators on a day-to-day basis.&lt;/li&gt;&lt;li&gt;Deployment and testing skills&lt;/li&gt;&lt;li&gt;Strong communication skills&lt;/li&gt;&lt;li&gt;ELT - Trino, Azure Data factory, Azure Databricks, PySpark, Python, Iceberg, Parquet&lt;/li&gt;&lt;li&gt;CDC Tool like Qlik/ Golden Gate/Debezium/IBM CDC, Kafka/ Solace  Scripting Shell, Python, Java,&lt;/li&gt;&lt;li&gt;Good Understanding of Azure Cloud Engineering ADLS, Iceberg, Databricks, AKS, RHEL&lt;/li&gt;&lt;li&gt;Good understanding of MS Project&lt;/li&gt;&lt;li&gt;Development skill using Trino, PySpark and Databricks&lt;/li&gt;&lt;li&gt;Understanding of security basics, Encryption/Decryption,&lt;/li&gt;&lt;li&gt;Understanding of IT hardware basics: Unix/Windows servers, RAM/CPU utilization, storage on cloud&lt;/li&gt;&lt;li&gt;Basic project management skills for preparation of a high-level project plan.&lt;/li&gt;&lt;li&gt;Understanding of DNS and Load Balancing, and their use.&lt;/li&gt;&lt;li&gt;&amp;nbsp;Understanding of DR/BCP/Recovery/Backup conceptually for DB and Apply Servers&lt;/li&gt;&lt;/ol&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: 
&lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Hdfc Bank&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814029/lead-data-engineer-avp-at-hdfc-bank/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814029/lead-data-engineer-avp-at-hdfc-bank/</link>
  <title>[Full Time] Lead Data Engineer (AVP) at Hdfc Bank</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/814935/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Snowflake Data Warehouse&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;18&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and supports the overall application functionality. You will also engage in discussions to refine data strategies and provide insights that enhance data management practices across the organization. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be a Subject Matter Expert with deep knowledge and experience.&lt;/li&gt;&lt;li&gt;Should have influencing and advisory skills.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and be responsible for team decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams, and provide solutions to business area problems.&lt;/li&gt;&lt;li&gt;Facilitate workshops and meetings to gather requirements and ensure alignment among stakeholders.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design decisions. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Snowflake Data Warehouse.&lt;/li&gt;&lt;li&gt;Experience with data modeling and database design principles.&lt;/li&gt;&lt;li&gt;Strong understanding of data integration techniques and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud data platforms and services.&lt;/li&gt;&lt;li&gt;Knowledge of data governance and data quality best practices. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 18 years of experience in Snowflake Data Warehouse.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/814935/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/814935/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/818840/digital-manufacturing-it-application-analyst-senior-at-cummins/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;The Manufacturing IT Application Analyst Senior provides application and infrastructure technical expertise, analysis, and specifications for IT systems to meet business requirements in accordance with Cummins IT architecture policies and standards. This role translates requirements into technical specifications, creates detailed solution designs, and coordinates the construction, installation, configuration, and testing of IT systems. The analyst will also identify, troubleshoot, and resolve complex technical issues while supporting manufacturing-focused digital solutions.&lt;/p&gt;Key Responsibilities &lt;ul&gt; &lt;li&gt;Translate business requirements into detailed technical specifications and solution designs.&lt;/li&gt; &lt;li&gt;Conduct technical analysis of potential solutions, including commercial off-the-shelf (COTS) products versus custom-built solutions.&lt;/li&gt; &lt;li&gt;Deliver solution designs adhering to Cummins architecture, security, and performance standards while ensuring scalability and maintainability.&lt;/li&gt; &lt;li&gt;Develop infrastructure or application components including interfaces, conversions, reports, and workflows.&lt;/li&gt; &lt;li&gt;Collaborate with IT technical service providers and ensure outsourced work packages are delivered on time, within cost, and to required quality standards.&lt;/li&gt; &lt;li&gt;Participate in design and code reviews, ensuring compliance with standards and technical specifications.&lt;/li&gt; &lt;li&gt;Leverage reusable components and automation to reduce costs and improve efficiency in build and deployment processes.&lt;/li&gt; &lt;li&gt;Assist in test strategy development, execution of master test plans, and solution validation testing.&lt;/li&gt; &lt;li&gt;Contribute to the creation of IT standards, processes, procedures, and end-user runbooks.&lt;/li&gt; &lt;li&gt;Analyze and revise existing systems to improve functionality, 
performance, and maintainability.&lt;/li&gt; &lt;li&gt;Provide &lt;strong&gt;Level 3 support&lt;/strong&gt; for critical system issues impacting manufacturing IT systems.&lt;/li&gt; &lt;/ul&gt; Required Qualifications, Skills, and Experience &lt;b&gt;Core Skills &amp;amp; Knowledge&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Minimum &lt;b&gt;3 years of IT experience&lt;/b&gt; focused on system development and analysis.&lt;/li&gt; &lt;li&gt;Proficiency in &lt;b&gt;Python, Machine Learning, Ignition, and Plotly-Dash&lt;/b&gt;.&lt;/li&gt; &lt;li&gt;Hands-on experience with &lt;b&gt;Microsoft utilities&lt;/b&gt; such as Azure Cloud, Power BI, and PowerApps.&lt;/li&gt; &lt;li&gt;Strong technical expertise in &lt;b&gt;interactive dashboard development&lt;/b&gt; and data visualization.&lt;/li&gt; &lt;li&gt;Understanding of &lt;b&gt;manufacturing data flows, system architecture, and cloud-based deployment&lt;/b&gt;.&lt;/li&gt; &lt;li&gt;Knowledge of &lt;b&gt;API development and secure data exchange&lt;/b&gt;.&lt;/li&gt; &lt;/ul&gt; &lt;b&gt;Preferred Qualifications&lt;/b&gt; &lt;ul&gt; &lt;li&gt;Educational or professional background in &lt;b&gt;manufacturing environments&lt;/b&gt;.&lt;/li&gt; &lt;li&gt;Experience with &lt;b&gt;PLC, OPC-UA, and HMI systems&lt;/b&gt;.&lt;/li&gt; &lt;li&gt;Exposure to &lt;b&gt;Artificial Intelligence (AI) technologies&lt;/b&gt;.&lt;/li&gt; &lt;/ul&gt; &lt;b&gt;Education, Licenses, Certifications&lt;/b&gt; &lt;ul&gt; &lt;li&gt;College, university, or equivalent degree in &lt;b&gt;Computer Science, Information Technology, Business, or a related field&lt;/b&gt;, or equivalent relevant experience.&lt;/li&gt; &lt;/ul&gt; &lt;strong&gt;Experience&lt;/strong&gt; &lt;ul&gt; &lt;li&gt;Intermediate-level experience required: &lt;strong&gt;3 5 years&lt;/strong&gt; of relevant professional experience.&lt;/li&gt; &lt;/ul&gt; Competencies &lt;ul&gt; &lt;li&gt; &lt;strong&gt;Customer Focus&lt;/strong&gt; Building strong relationships and delivering customer-centric 
solutions.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Global Perspective&lt;/strong&gt; Approaching challenges with a broad, international outlook.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Manages Complexity&lt;/strong&gt; Analyzing large volumes of information to effectively solve problems.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Manages Conflict&lt;/strong&gt; Resolving disagreements constructively and with minimal disruption.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Optimizes Work Processes&lt;/strong&gt; Driving continuous improvement and efficiency.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Data Modeling&lt;/strong&gt; Creating and testing data models/scripts with proper governance and compliance.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Performance Tuning&lt;/strong&gt; Solving application, database, and hardware issues using industry-standard methods.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Solution Configuration &amp;amp; Design&lt;/strong&gt; Configuring and designing COTS and custom solutions with scalability and compliance.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Solution Functional Fit Analysis&lt;/strong&gt; Ensuring component integration and holistic system performance.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Solution Modeling &amp;amp; Validation Testing&lt;/strong&gt; Designing, documenting, and validating IT solutions per SDLC standards.&lt;/li&gt; &lt;li&gt; &lt;strong&gt;Values Differences&lt;/strong&gt; Leveraging diverse perspectives and cultures for organizational success.&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Miscellaneous&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cummins&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/818840/digital-manufacturing-it-application-analyst-senior-at-cummins/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/818840/digital-manufacturing-it-application-analyst-senior-at-cummins/</link>
  <title>[Full Time] Digital Manufacturing IT Application Analyst - Senior at Cummins</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816526/application-developer-oracle-cloud-integration-at-ibm/">
   <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;p&gt;As a Software Developer you&apos;ll participate in many aspects of the software development lifecycle, such as design, code implementation, testing, and support. You will create software that enables your clients&apos; hybrid-cloud and AI journeys. &lt;b&gt;Your primary responsibilities include:&lt;/b&gt; Comprehensive Feature Development and Issue Resolution: Working on the end to end feature development and solving challenges faced in the implementation. Stakeholder Collaboration and Issue Resolution: Collaborate with key stakeholders, internal and external, to understand the problems, issues with the product and features and solve the issues as per SLAs defined. Continuous Learning and Technology Integration: Being eager to learn new technologies and implementing the same in feature development.&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;Required education&lt;/b&gt;&lt;/div&gt;&lt;div&gt; Bachelor&apos;s Degree &lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;Required technical and professional expertise&lt;/b&gt;&lt;/div&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Should have minimum 3 or more years of relevant experience in ODI(Oracle Data Integrator) 12c Development and Implementation.&lt;/li&gt;&lt;li&gt;Should have good knowledge of integrating with Web Services, XML(Extensible Markup Language) and other API(Application Programming Interface) to transfer the data - from source and target, in addition to database.&lt;/li&gt;&lt;li&gt;Should have hands on experience in complex data migration between heterogeneous large complex databases (Oracle database is must)&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;Preferred technical and professional experience&lt;/b&gt;&lt;/div&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Exposure in risks management and resolving issues that affect release scope.&lt;/li&gt;&lt;li&gt;Ability to maintain quality and 
bring potential solutions to the table&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;IBM&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816526/application-developer-oracle-cloud-integration-at-ibm/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816526/application-developer-oracle-cloud-integration-at-ibm/</link>
  <title>[Full Time] Application Developer-Oracle Cloud Integration at IBM</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813924/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniques. Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Matillion, or Fivetran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813924/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813924/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813559/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage For Level 7,8 - Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience oFor Level 9,10 - Candidate must have 5-8 years of IT experience and around 2+ years Data oGovernance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813559/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813559/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 24 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813608/dsp-data-stewardship-platform-professional-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;We are looking for a hands-on Data Migration Specialist with strong SQL skills and solid experience using DSP (Data Stewardship Platform) as a data migration tool. The role will focus on extracting data from SAP and other enterprise sources, transforming/validating it, and executing end-to-end migrations with strong governance, reconciliation, and defect resolution. &lt;strong&gt; Key Responsibilities &lt;/strong&gt; Execute end-to-end data migration activities using DSP (Data Stewardship Platform) including load preparation, trial loads, cutover loads, Pre and post load reports generation and validation. &lt;br&gt; Write and optimize SQL to extract, transform, validate data across source and target systems. &lt;br&gt; Build and maintain data extraction/connection approaches from SAP and other sources (other ERP, Excels ) in coordination with source system owners. &lt;br&gt; Perform cleansing rules, deduplication, standardization, Conversion, and enrichment as required for migration readiness. &lt;br&gt; Build migration deliverables: Object structure in DSP tool, source-to-target mappings given by clients, transformation logic, data rules, load plans, and runbooks. &lt;br&gt; Manage data quality checks and reconciliation (record counts, key totals, variance analysis, referential integrity). &lt;br&gt; Triage and resolve migration defects, including root-cause analysis, re-runs, and coordination with functional/technical teams. &lt;br&gt; Support cutover planning: sequencing, dependencies &lt;br&gt; Ensure adherence to data governance and security requirements (sensitive data handling, access controls). &lt;strong&gt; Required Qualifications &lt;/strong&gt; Strong hands-on SQL skills. &lt;br&gt; Hands-on experience with DSP (Data Stewardship Platform) specifically for data migration (load execution, rules/validations, issue handling, migration lifecycle). 
&lt;br&gt; Experience extracting/working with data from SAP and other enterprise systems. &lt;br&gt; Strong understanding of ETL/ELT and migration concepts: full vs delta loads, staging, transformation patterns, error handling, restartability, and reconciliation. &lt;br&gt; Experience working with structured delivery (defect tracking, runbooks, cutover checklists, documentation). &lt;br&gt; Strong stakeholder collaboration skills (business data owners, SAP functional/technical teams, target system teams).&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813608/dsp-data-stewardship-platform-professional-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813608/dsp-data-stewardship-platform-professional-at-cirruslabs/</link>
  <title>[Full Time] DSP (Data Stewardship Platform) Professional at Cirruslabs</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813613/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly and efficiently throughout the organization, contributing to the overall success of data-driven initiatives. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing sessions to enhance team capabilities.- Develop and maintain documentation related to data architecture and design. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Microsoft Azure Databricks.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud data storage solutions and architectures.- Ability to design scalable and efficient data pipelines. Additional Information:- The candidate should have minimum 5 years of experience in Microsoft Azure Databricks.- This position is based in Pune.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813613/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813613/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813662/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Lead the effort to design, build and configure applications, acting as the primary point of contact. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;SAP FI CO Finance&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Lead, you will lead the effort to design, build, and configure applications, acting as the primary point of contact. Your typical day will involve collaborating with various teams to ensure that project goals are met, facilitating discussions to address challenges, and guiding your team through the development process. You will also engage in strategic planning and decision-making to enhance application performance and user experience, ensuring that all stakeholders are aligned with the project objectives. Roles &amp;amp; Responsibilities&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate training and knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and implement necessary adjustments to meet deadlines. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in SAP FI CO Finance.&lt;/li&gt;&lt;li&gt;Experience in Account Payable(AP) processes, integration with MM modules.&lt;/li&gt;&lt;li&gt;Strong understanding of financial reporting and analysis.&lt;/li&gt;&lt;li&gt;Experience with integration of SAP modules.&lt;/li&gt;&lt;li&gt;Familiarity with project management methodologies.&lt;/li&gt;&lt;li&gt;Ability to troubleshoot and resolve application issues efficiently. &lt;br&gt;Additional Information&lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in SAP FI CO Finance.&lt;/li&gt;&lt;li&gt;This position is based in Hyderabad.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813662/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813662/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813680/sample-manager-lims-developer-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Having Sample Manager LIMS Upgradation experience preferably in 21 and above version. &lt;/li&gt; &lt;li&gt; Knowledge on instrument integration like openlab. Interface configuration and upgradation experience for SMIDI, SMIP21. &lt;/li&gt; &lt;li&gt; Proficient in creating Labels, Reports in LIMS report designer. &lt;/li&gt; &lt;li&gt; Hands on experience in VGL and C#. Strong knowledge on SQL queries. &lt;/li&gt; &lt;li&gt; Must have worked on Migration/ Upgradation before. &lt;/li&gt; &lt;li&gt; Understanding Business process flow with architecture and be a team player with ability to lead the team. &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813680/sample-manager-lims-developer-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813680/sample-manager-lims-developer-at-cirruslabs/</link>
  <title>[Full Time] Sample Manager LIMS Developer at Cirruslabs</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813700/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NAMinimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813700/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813700/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813710/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Google Cloud Platform Architecture&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NAMinimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will design and deliver end-to-end data architecture solutions for platforms, products, or engagements on Google Cloud. You will define architectures that meet performance, scalability, security, and compliance requirements while ensuring data integrity and accessibility. You will be responsible for the successful implementation of data solutions that align with business strategy.&lt;br&gt;Roles &amp;amp; Responsibilities:Expected to be a Subject Matter Expert (SME) with deep expertise in Google Cloud data architecture.Provide strategic guidance, influencing architectural decisions across multiple teams.Collaborate with stakeholders to define data strategies, roadmaps, and governance models.Design enterprise-grade data architectures supporting analytics, AI/ML, and operational workloads.Ensure solutions adhere to best practices for security, performance, and cost optimization.Lead the implementation of data architecture frameworks and reference models.Guide teams on data migration, integration, and modernization initiatives.Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Expertise in Google Cloud data services (BigQuery, Cloud Storage, Pub/Sub, Dataflow, Dataproc, etc.).Strong knowledge of data architecture principles, data 
modeling, and data governance.Proven experience in designing scalable, high-performance, and secure cloud-based data platforms.Hands-on experience with data ingestion, ETL/ELT, streaming, and batch processing.Familiarity with compliance frameworks and data security best practices in cloud environments.&lt;br&gt;Additional Information:The candidate should have a minimum of 16 years of experience in data architecture, with a strong focus on Google Cloud.This position is based Pan India&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813710/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813710/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813782/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813782/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813782/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813792/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt;&lt;br&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 5-8 years of IT experience and around 2+ years Data Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813792/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813792/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813805/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;JavaScript&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Java&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with the overall business objectives and technical standards. You will collaborate with various teams to ensure that the data architecture supports the applications functionality and performance needs, while also considering scalability and security aspects. Roles &amp;amp; Responsibilities&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Evaluate and recommend tools and technologies that can improve data architecture. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in JavaScript.&lt;/li&gt;&lt;li&gt;Secondary &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Java.Strong in Java, Agile development, CI/CD&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with database design and optimization.&lt;/li&gt;&lt;li&gt;Familiarity with cloud data storage solutions and integration methods. &lt;br&gt;Additional Information&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in JavaScript.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813805/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813805/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813832/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Data &amp;amp; AI Solution Architecture, Microsoft Azure Databricks, Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure the successful implementation of data solutions, while also addressing any challenges that arise during the development process. Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Evaluate and recommend new technologies to improve data architecture. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Data &amp;amp; AI Solution Architecture, Microsoft Azure Databricks, Databricks Unified Data Analytics Platform.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with cloud-based data storage solutions and architectures.&lt;/li&gt;&lt;li&gt;Familiarity with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Ability to design scalable and efficient data pipelines. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Data &amp;amp; AI Solution Architecture.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813832/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813832/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813846/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Lead the effort to design, build and configure applications, acting as the primary point of contact. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;SAP FI CO Finance&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Lead, you will lead the effort to design, build, and configure applications, acting as the primary point of contact. Your typical day will involve collaborating with various teams to ensure that project goals are met, facilitating discussions to address challenges, and guiding your team through the development process. You will also engage in strategic planning and decision-making to enhance application performance and user experience, ensuring that all stakeholders are aligned with the project objectives. Roles &amp;amp; Responsibilities&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate training and knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor project progress and implement necessary adjustments to meet deadlines. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in SAP FI CO Finance.&lt;/li&gt;&lt;li&gt;Experience in Account Payable(AP) processes, integration with MM modules.&lt;/li&gt;&lt;li&gt;Strong understanding of financial reporting and analysis.&lt;/li&gt;&lt;li&gt;Experience with integration of SAP modules.&lt;/li&gt;&lt;li&gt;Familiarity with project management methodologies.&lt;/li&gt;&lt;li&gt;Ability to troubleshoot and resolve application issues efficiently. &lt;br&gt;Additional Information&lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in SAP FI CO Finance.&lt;/li&gt;&lt;li&gt;This position is based in Hyderabad.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813846/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813846/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813881/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;UNIX, Sun Solaris, HP UX, IBM &lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813881/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813881/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-24T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812451/senior-software-engineer-at-walmart/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Position Summary...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; As a Senior Software Engineer in the Cloud and Data Foundations organization in Walmart, you will play a pivotal role in owning , designing, building, and maintaining high-quality software solutions in the database platform engineering domain. We are seeking engineers who combine engineering excellence in datastore fleet management, automating operational workflows with a collaborative mindset and a passion for customer-centric problem solving. You will work closely with cross-functional partners to deliver robust, scalable, and maintainable solutions for the cloud and data foundations organization that power one of the world s largest retail platform. 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;What youll do...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;b&gt;About the team&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt;The Database Frameworks is part of the Cloud and Data foundations organization in Walmart. The Database Frameworks team s mission is to provide data access and data lifecycle management, with client-side abstractions in a multi-cloud polyglot database ecosystem. By building intuitive, standards-based DB abstraction layers the team enables developers in the Walmart Global Tech organization to work efficiently across diverse database management systems deployed as PaaS and IaaS solutions . 
The mission is to abstract away platform-specific details, accelerate development, and ensure portability and scalability for Walmart s applications, supporting our multi-cloud strategy with best-of-breed datastore technology landscape.&lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;What You ll Do &lt;/b&gt;:&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Own, Design and Deliver production-grade software for DB fleet management using Java / similar languages, adhering to best practices in code quality, performance, and maintainability.&lt;/li&gt; &lt;li&gt;Own feature design, production readiness for various database access-abstraction frameworks.&lt;/li&gt; &lt;li&gt;Continously augments the database access-abstraction framework features to drive operational efficiency and reliability for the SQL and NoSQL database fleet in Walmart.&lt;/li&gt; &lt;li&gt;Owns and delivers new features for assigned business critical themes, from high level design through deployment and ongoing support.&lt;/li&gt; &lt;li&gt;Collaborate with product managers, architects, and other stakeholders to contribute to technical decisions.&lt;/li&gt; &lt;li&gt;Mentors mid-level engineers by conducting code, design reviews, sharing knowledge, and fostering a culture of continuous improvement.&lt;/li&gt; &lt;li&gt;Support customer application teams and partner platform teams during incident resolution, demonstrating high customer empathy and a commitment to operational excellence.&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;What You ll bring &lt;/b&gt;:&lt;/p&gt; &lt;ul&gt; &lt;li&gt;6-10 years of professional experience in software engineering, with a proven track record of desiging and delivering high-quality solutions in a fast-paced environment.&lt;/li&gt; &lt;li&gt;Proficiency in Java and advanced knowledge of devops tools such as Jenkins, Ansible.&lt;/li&gt; &lt;li&gt;Proven record of designing and delivering production grade client-side abstraction layers for various DB technologies.&lt;/li&gt; 
&lt;li&gt;Advanced knowledge of at least two datastore technologies, such as CosmosDB, AzureSQL, Cassandra, or Opensearch.&lt;/li&gt; &lt;li&gt;Proven record of adhering to the tenets of Operational excellence for datastore fleet management.&lt;/li&gt; &lt;li&gt; &lt;span&gt;Preferred Skill- Developing automation workflows with AI engineering frameworks like LangChain, vector databases,MCPs and large language models (LLMs) that enables seamless orchestration of complex operational tasks.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;About Walmart Global Tech&lt;/b&gt; &lt;br&gt;Imagine working in an environment where one line of code can make life easier for hundreds of millions of people. That s what we do at Walmart Global Tech. We re a team of software engineers, data scientists, cybersecurity experts and service professionals within the world s leading retailer who make an epic impact and are at the forefront of the next retail disruption. People are why we innovate, and people power our innovations. We are people-led and tech-empowered.&lt;/p&gt; &lt;p&gt;We train our team in the skillsets of the future and bring in experts like you to help us grow. We have roles for those chasing their first opportunity as well as those looking for the opportunity that will define their career. Here, you can kickstart a great career in tech, gain new skills and experience for virtually every industry, or leverage your expertise to innovate at scale, impact millions and reimagine the future of retail.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Flexible work&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;Walmart s culture sets us apart, and we know being together helps us innovate, learn and grow great careers. 
This role is based in our [Bangalore/Chennai] office for daily work, with the flexibility for associates to manage their personal lives.&lt;/b&gt; &lt;br&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;Benefits&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Beyond our great compensation package, you can receive incentive awards for your performance. Other great perks include a host of best-in-class benefits maternity and parental leave, PTO, health benefits, and much more.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Belonging&lt;/b&gt; &lt;/p&gt; &lt;p&gt;We aim to create a culture where every associate feels valued for who they are, rooted in respect for the individual. Our goal is to foster a sense of belonging, to create opportunities for all our associates, customers and suppliers, and to be a Walmart for everyone.&lt;/p&gt; &lt;p&gt;At Walmart, our vision is &quot;everyone included.&quot; By fostering a workplace culture where everyone is and feels included, everyone wins. Our associates and customers reflect the makeup of all 19 countries where we operate. By making Walmart a welcoming place where all people feel like they belong, we re able to engage associates, strengthen our business, improve our ability to serve customers, and support the communities where we operate.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Equal Opportunity Employer&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Walmart, Inc., is an Equal Opportunities Employer By Choice. We believe we are best equipped to help our associates, customers and the communities we serve live better when we really know them. 
That means understanding, respecting and valuing unique styles, experiences, identities, ideas and opinions while being inclusive of all people.&lt;/p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Minimum Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;span&gt; &lt;i&gt;Outlined 
below are the required minimum qualifications for this position. If none are listed, there are no minimum qualifications. &lt;/i&gt; &lt;/span&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Option 1: Bachelors degree in computer science, computer engineering, computer information systems, software engineering, or related area and 3 years experience in software engineering or related area.&lt;br&gt;Option 2: 5 years experience in software engineering or related area. 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Preferred Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;i&gt;Outlined below are the optional preferred qualifications for this position. If none are listed, there are no preferred qualifications. 
&lt;/i&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Master s degree in computer science, information technology, engineering, information systems, cybersecurity, or related area and 1 year s experience leading information security or cybersecurity projects, We value candidates with a background in creating inclusive digital experiences, demonstrating knowledge in implementing Web Content Accessibility Guidelines (WCAG) 2.2 AA standards, assistive technologies, and integrating digital accessibility seamlessly. The ideal candidate would have knowledge of accessibility best practices and join us as we continue to create accessible products and services following Walmart s accessibility standards and guidelines for supporting an inclusive culture. 
Information Technology - CISCO Certification - Certification &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Primary Location...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; G, 1, 3, 4, 5 Floor, Building 11, Sez, Cessna Business Park, Kadubeesanahalli Village, Varthur Hobli , India&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Walmart&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/812451/senior-software-engineer-at-walmart/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812451/senior-software-engineer-at-walmart/</link>
  <title>[Full Time] Senior Software Engineer at Walmart</title>
  <dc:date>2026-02-18T02:09:27+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813096/denodo-developer-at-itc-infotech/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Senior Denodo Developer SME Experienced Senior Denodo Developer Subject Matter Expert SME to design, develop, and govern enterprise-scale data virtualization solutions using Denodo. The ideal candidate to bring deep technical expertise, strong data architecture knowledge, and a consulting mindset to enable trusted, high-performance data access across the organization. This role requires close collaboration with data platform teams, DBAs, security, architecture, and business stakeholders, and will guide teams on best practices, limitations, and optimal usage of Denodo in a business context, with leadership and mentoring responsibilities. Key Responsibilities Business Understanding Analytics Engage with business stakeholders to understand data consumption needs and analytical use cases Translate business requirements into Denodo logical and physical data models Perform data analysis to assess source system structure, data quality, and availability Ensure compliance with Business regulations, audit, lineage, and data governance standards. Enable self-service analytics and BI by delivering curated, business-friendly data views Define and track KPIs related to data performance, reliability, and usage Communicate data limitations, performance considerations, and best practices to stakeholders Development Design and develop Denodo virtual databases, views, and services Implement complex joins, aggregations, derived views, and reusable data services Optimize queries using caching, indexing, query hints, and cost-based optimization Develop REST and SOAP web services using Denodo Integrate Denodo with BI tools, analytics platforms, and downstream applications Implement CICD and version control practices for Denodo artifacts Troubleshoot and resolve performance, connectivity, and data issues Own end-to-end Denodo platform and operations VDP, Scheduler, Monitor, Solution Manager. 
Define and govern BaseViews DerivedViews InterfaceViews layering, ensuring Denodo is used strictly as a data access and abstraction layer. Lead Denodo development with strong focus on SQL optimization, push-down, caching, and performance tuning. Design and manage cache strategies, SLAs, capacity, HADR, and platform scalability. Implement security, RBAC, rowcolumn controls, masking, and integrate with LDAPADIAM. Act as L3L4 escalation owner for production issues, RCA, upgrades, and platform lifecycle. Work closely with DB, data engineering, BI, and infrastructure teams to offload heavy transformations to source systems. Support in integration of enterprise reporting tools with secure and performant Denodo access. Operate as a platform owner and SME, mentoring teams and enforcing clear Denodo usage guardrails. Modelling Design and manage Denodo logical data models and semantic layers Implement best practices for view layering base, derived, and presentation views Apply data modelling concepts including dimensional, relational, and canonical models Define metadata, business definitions, and data lineage Ensure consistency, reusability, and scalability of virtual data models Collaborate with data architects on enterprise data standards Data Platform Tool Stack Core Platform o Denodo Platform o Denodo Virtual DataPort VDP o Denodo Scheduler Data Catalog Data Sources Integration o Relational databases Oracle, SQL Server, PostgreSQL, MySQL o Cloud data platforms Azure Synapse o File-based sources CSV, Parquet, JSON, XML o APIs, web services, and message-based integrations Development Analytics Tools o SQL advanced proficiency o Denodo Design Studio Solution Manager o BI tools: Power BI, Qlik o REST clients Postman, Swagger DevOps, Security Monitoring o Version control using Git o CICD integration o Authentication authorization using LDAP SSO OAuth o Performance monitoring, query profiling, and usage analytics Consulting Stakeholder Management Act as a trusted advisor for 
enterprise data virtualization and Denodo adoption Lead requirement discovery sessions, design workshops, and solution walkthroughs Guide stakeholders on data access patterns, semantic modelling, and performance trade-offs Collaborate with data governance, security, and compliance teams Support architecture reviews, pre-sales discussions, and solution proposals when required Influence data strategy decisions and promote virtualization best practices Leadership Mentoring Serve as Denodo SME and escalation point for complex technical issues Mentor junior developers and data engineers on Denodo best practices Lead design and code reviews for Denodo implementations Define standards, reusable frameworks, and governance guidelines Support onboarding, training, and knowledge-sharing initiatives Contribute to Denodo roadmap planning and capability maturity Soft Skills Strong communication presentation skills Business-first mindset Problem-solving analytical thinking Ability to work independently and lead initiatives Nice to Have Experience with large-scale enterprise or regulated environments Exposure to cloud-native data platforms and modern analytics architectures Strong documentation, presentation, and stakeholder communication skills Experience working in consulting or system integrator environments Experience working in consulting, client-facing, or large enterprise environments&lt;/p&gt; &lt;br&gt;&lt;b&gt; Roles and Responsibilities&lt;/b&gt; &lt;br&gt;&lt;p&gt;Own end-to-end Denodo platform and operations VDP, Scheduler, Monitor, Solution Manager. Define and govern BaseViews DerivedViews InterfaceViews layering, ensuring Denodo is used strictly as a data access and abstraction layer. Lead Denodo development with strong focus on SQL optimization, push-down, caching, and performance tuning. Design and manage cache strategies, SLAs, capacity, HADR, and platform scalability. Implement security, RBAC, rowcolumn controls, masking, and integrate with LDAPADIAM. 
Ensure compliance with banking regulations, audit, lineage, and data governance standards. Act as L3L4 escalation owner for production issues, RCA, upgrades, and platform lifecycle. Work closely with DB, data engineering, BI, and infrastructure teams to offload heavy transformations to source systems.&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;ITC Infotech&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813096/denodo-developer-at-itc-infotech/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813096/denodo-developer-at-itc-infotech/</link>
  <title>[Full Time] Denodo Developer at ITC Infotech</title>
  <dc:date>Tue, 17 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813926/dbt-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;li&gt; Project description &lt;br&gt; DXC - a Fortune 500 global IT services leader. At DXC Technology we deliver the mission-critical IT services that move the world. Every day we use the power of technology to build better futures for our customers, colleagues, environment, and communities across the globe.&lt;br&gt;We are flexible - we provide everything you need to comfortably work from home, but we also keep our offices open for collaboration, meetings, and building a strong team spirit. We tailor everyone&apos;s development path to their individual interests through training and additional certifications. &lt;br&gt; &lt;/li&gt;&lt;li&gt;Responsibilities &lt;/li&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt; Design, develop, and maintain DBT models, transformations, and SQL code to build efficient data pipelines for analytics and reporting. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Design, develop, and maintain ETL/ELT pipelines using DBT and pulling datafrom Snowflake. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Define and implement data modelling best practices, including data warehousing, ETL processes, and data transformations using DBT. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Build complex SQL queries within DBT to build incremental models, enhancing data processing efficiency. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Establish data governance practices and ensure data accuracy, quality, and consistency within the data transformation process. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Collaborate with data engineers, data analysts, and other stakeholders to understand and meet data requirements for various business units. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Identify and address performance bottlenecks in data transformation processes and optimize DBT models for faster query performance. 
&lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Maintain thorough documentation of DBT models, transformations, and data dictionaries to ensure transparency and accessibility to team members. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Implement data security measures to protect sensitive information and comply with data privacy regulations. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Stay updated on industry best practices and new features in DBT, and continuously improve the data transformation processes. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Provide training and support to other team members in using DBT effectively. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Implement data quality checks and validation processes to ensure data accuracy and consistency. &lt;br&gt; &lt;/li&gt;&lt;li&gt;Skills &lt;br&gt; Must have &lt;br&gt; &lt;/li&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt; 8+ years of experience in DBT. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Hands-on experience in implementing data governance, data quality rules and validation mechanisms within Collibra is added plus. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Knowledge of workflow orchestration tools like Tidal. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Experience with Python or other scripting languages is a plus. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Familiarity with Azure cloud platforms. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; Exposure to DevOps practices and CI/CD pipelines for data engineering. 
Nice to have &lt;br&gt; &lt;/li&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt; Certification &lt;br&gt; &lt;/li&gt;&lt;li&gt;&lt;b&gt;Location - &lt;/b&gt;pune,mumbai,chennai,banagalore&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813926/dbt-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813926/dbt-developer-at-luxoft/</link>
  <title>[Full Time] DBT Developer at Luxoft</title>
  <dc:date>Tue, 17 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/815553/database-administrator-at-sunquest-information/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;In this role you will get the opportunity to work with Clinisys which is a global leader in healthcare information technology&lt;br&gt; As a part of the Solution Adoption team, you will act as a liaison between the business and its customers and will be responsible to deliver impeccable service by providing high quality software solutions&lt;br&gt; As an Oracle DBA you will perform essential database functions from development through production including but not limited to database installations, upgrades, troubleshooting, and conversions for both client based and cloud hosted products&lt;br&gt; Additionally, in this role you will be accountable for documentation for all database functions, Essential Functions?/ Job Responsibilities&lt;br&gt; Install Oracle database patches and troubleshoot various issues related to database patching, Perform production database health check, Detect potential issues that may impact production database operation and resolve the issues, Performs database support for both production and development environments, including the installation of databases, back-ups, replication, restores, patches, and upgrades, Works closely with product development teams to create and manage product databases, solve problems, and optimize performance, Provide product database support for client installations and knowledge of applying security patches on Oracle Databases, Ability to configure replication as a part of disaster recovery process, Identify and resolve database issues that impact application performance, Follows appropriate sign-off and escalation procedures for database changes and version upgrades, Document database installations, replications, upgrades, and conversions, Stay ahead of the latest database versions and features, Qualify latest database versions for application use and recommend and implement emerging database technologies, Craft and deliver training materials as assigned, 
Prepare reports or correspondence concerning project specifications, activities, or status, All other duties and responsibilities as assigned, Strong command of Oracle, Oracle server tools and Oracle Data Guard, Advanced knowledge of database security, backup and recovery, performance monitoring and tuning standards, Understanding of relational and dimensional data modelling, Strong mathematical and statistical knowledge, Impeccable attention to detail, Develop processes for optimizing database security, Create and manage database reports, visualizations, and dashboards, Create automation for repeating database tasks, Be available for on-call support as needed, Skills Needed To Be Successful&lt;br&gt; Deep knowledge of Oracle servers/databases and Oracle database development and Oracle Data Guard, Deep knowledge in both OLTP and OLAP database designs and administration, Proven System Administration skills, Innovative thinker with a commitment to improve processes and methodologies&lt;br&gt; Strong written and verbal communications skills, Excellent client management &amp;amp; client service skills&lt;br&gt; Create/Maintain Oracle Database Projects for development, Knowledge and experience preferred with AI-driven development and automation tools such as GitHub Copilot, Copilot Studio, and related platforms to optimize database administration tasks, implement intelligent query tuning, and enhance operational efficiency&lt;br&gt; Demonstrated understanding of AI concepts including Model Context Protocol (MCP), Prompt Engineering, and integration of AI-assisted workflows into DevOps and CI/CD pipelines, Required Experience &amp;amp; Education&lt;br&gt; Bachelor&apos;s degree in business or computer science, or equivalent education and work experience combined, Minimum 3 to 5 years of Oracle database administration and Oracle Data Guard experience, Prior exposure to software development processes and/or methodologies, Familiarity with on premise and cloud-based 
implementations, Preferred Experience &amp;amp; Education&lt;br&gt; Database certification(s) preferred, Prior experience with server class hardware, operating systems, and virtualization, Supervisory Responsibilities&lt;br&gt; None&lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sunquest Information&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/815553/database-administrator-at-sunquest-information/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/815553/database-administrator-at-sunquest-information/</link>
  <title>[Full Time] Database Administrator at Sunquest Information</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812163/mainframe-db2-dba-admin-not-developer-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;TCS has been a great pioneer in feeding the fire of young techies like you. We are a global leader in the technology arena and theres nothing that can stop us from growing together.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;What we are looking for&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Role: Mainframe DB2 DBA&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience Range: 7  15 Years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location : Bangalore/Mumbai&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Must Have:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience in Mainframe Db2 v12/ v13 Database System-Administration on z/OS&lt;/li&gt;&lt;li&gt;DB2 installation, upgrade, patching, configuration and operations&lt;/li&gt;&lt;li&gt;Proficiency in DB2, VSAM Systems Performance Tuning and Capacity Planning&lt;/li&gt;&lt;li&gt;Experience in Data Sharing setup in Parallel Sysplex environment&lt;/li&gt;&lt;li&gt;Knowledge in ACF2/RACF, CICS v5.5, Mainframe MQ v9&lt;/li&gt;&lt;li&gt;Experience in SMPE and ISV tools installation and migration&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Good to Have:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Database DR, Cloning, Backup/Recovery, HA and Point In Time Recovery (PITR) expertise&lt;/li&gt;&lt;li&gt;Experience with Database ISV Tools and Products like OmegaMon and AdminTool&lt;/li&gt;&lt;li&gt;Application related performance tuning; Write, modify, and debug database specific SQL queries&lt;/li&gt;&lt;li&gt;DBA related utilities like Unload, Load, DSNJU003, DSNJU004,Reorg, Runstats, image copy&lt;/li&gt;&lt;li&gt;REXX, CLIST, JCL, JES and Assembler knowledge&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Essential:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Define and provide guidance for database creation, configuration, upgrade, patches and refresh requirements&lt;/li&gt;&lt;li&gt;Recommend operating system and database performance monitoring, tuning and 
configuration changes&lt;/li&gt;&lt;li&gt;Experience with database physical and logical design, query analysis and optimization&lt;/li&gt;&lt;li&gt;Provide technical assistance and subject matter expertise to applications groups. Provide database storage management and capacity management recommendations&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Minimum Qualification:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;15 years of full-time education&lt;/li&gt;&lt;li&gt;Minimum percentile of 50% in 10th, 12th, UG &amp;amp; PG (if applicable)&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812163/mainframe-db2-dba-admin-not-developer-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812163/mainframe-db2-dba-admin-not-developer-at-tata-consultancy/</link>
  <title>[Full Time] Mainframe DB2 DBA Admin Not Developer at Tata Consultancy</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812762/test-lead-at-bahwan-cybertek/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Roles and Responsibility&lt;/b&gt;&lt;span&gt;&lt;/span&gt;&lt;div&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Design and develop automation frameworks and scripts for testing purposes.&lt;/li&gt;&lt;li&gt;Utilize web driver IO and Rest Assured API to ensure efficient testing processes.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to identify and prioritize testing requirements.&lt;/li&gt;&lt;li&gt;Develop and maintain test cases, test scripts, and test data.&lt;/li&gt;&lt;li&gt;Analyze test results and report defects to the development team.&lt;/li&gt;&lt;li&gt;Ensure timely delivery of high-quality testing services.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;Job Requirements&lt;/b&gt;&lt;span&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Bachelor&apos;s degree in Computer Science, Information Technology, or related field (B.Tech/MCA/B.Sc).&lt;/li&gt;&lt;li&gt;Minimum 8 years of experience in software testing, focusing on automation testing.&lt;/li&gt;&lt;li&gt;Strong knowledge of automation frameworks, web driver IO, and Rest Assured API.&lt;/li&gt;&lt;li&gt;Excellent problem-solving skills and attention to detail.&lt;/li&gt;&lt;li&gt;Ability to work in an agile environment and collaborate with teams.&lt;/li&gt;&lt;li&gt;Strong communication and interpersonal skills.&lt;/li&gt;&lt;li&gt;Notice period: Immediate joiner.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Bahwan CyberTek&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br 
/&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812762/test-lead-at-bahwan-cybertek/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812762/test-lead-at-bahwan-cybertek/</link>
  <title>[Full Time] Test Lead at Bahwan CyberTek</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812916/mainframe-z-os-jcl-admin-only-at-tata-consultancy/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Experience : 5 to 7 Years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Skillset :Mainframe Z/os JCL&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location: Kolkata, Pune, Bangalore&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Must-Have&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;-Strong knowledge in SMPE&lt;/p&gt;&lt;p&gt;-Product installation and ISV products&lt;/p&gt;&lt;p&gt;-Strong experience in zOS versions upgrades,&lt;/p&gt;&lt;p&gt;-Installation of patches and fixes&lt;/p&gt;&lt;p&gt;-parallel Sysplex concepts&lt;/p&gt;&lt;p&gt;-System parameters/Parmlib maintenance&lt;/p&gt;&lt;p&gt;-zOS MF configuration and setup&lt;/p&gt;&lt;p&gt;-Experience mainframe LPAR builds&lt;/p&gt;&lt;p&gt;-Managing incidents on z/OS related issues&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Good-to-Have&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;1.Strong communication skills&lt;/p&gt;&lt;p&gt;2.Flexible with shifts and environments&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Tata Consultancy&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812916/mainframe-z-os-jcl-admin-only-at-tata-consultancy/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812916/mainframe-z-os-jcl-admin-only-at-tata-consultancy/</link>
  <title>[Full Time] Mainframe z/OS JCL Admin Only at Tata Consultancy</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812948/software-engineer-python-developer-at-bahwan-cybertek/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;span&gt;We are looking for an experienced Python Developer with 5 to 7 years experience with strong scripting capabilities and hands-on experience in database interaction. The candidate will be responsible for developing automation scripts, data processing workflows, and backend utilities, while collaborating closely with application, data, and infrastructure teams.&lt;/span&gt;&lt;/p&gt;&lt;div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Develop, enhance, and maintain &lt;strong&gt;Python scripts &lt;/strong&gt;for automation, data processing, and system integration&lt;/li&gt;&lt;li&gt;Write &lt;strong&gt;efficient, reusable, and well-documented Python code&lt;/strong&gt;following best practices&lt;/li&gt;&lt;li&gt;Design and execute&lt;strong&gt;database queries&lt;/strong&gt;for data extraction, transformation, and validation&lt;/li&gt;&lt;li&gt;Integrate Python scripts with &lt;strong&gt;relational databases&lt;/strong&gt;for batch and near real-time processing&lt;/li&gt;&lt;li&gt;Perform &lt;strong&gt;data validation, reconciliation, and exception handling&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Optimize scripts for&lt;strong&gt;performance, scalability, and reliability&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to understand requirements and deliver solutions&lt;/li&gt;&lt;li&gt;Troubleshoot production issues and provide timely fixes&lt;/li&gt;&lt;li&gt;Ensure adherence to &lt;strong&gt;coding standards, security guidelines, and compliance requirements&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Support deployment and execution of scripts in &lt;strong&gt;Linux / Windows environments&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; 
QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Bahwan CyberTek&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812948/software-engineer-python-developer-at-bahwan-cybertek/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812948/software-engineer-python-developer-at-bahwan-cybertek/</link>
  <title>[Full Time] Software Engineer - Python Developer at Bahwan CyberTek</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813856/qa-professional-azure-and-databricks-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;&lt;span&gt;Key Responsibilities&lt;/span&gt;&lt;/b&gt;&lt;br&gt;&lt;span&gt;&lt;strong&gt;1) Test Strategy &amp;amp; Planning&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Define and maintain &lt;b&gt;test strategies&lt;/b&gt; for data pipelines, notebooks, jobs, and APIs across Databricks &amp;amp; Azure.&lt;/li&gt;&lt;li&gt;Translate &lt;b&gt;business rules&lt;/b&gt; and &lt;b&gt;data quality SLAs&lt;/b&gt; into testable acceptance criteria.&lt;/li&gt;&lt;li&gt;Establish &lt;b&gt;risk-based&lt;/b&gt; test plans across functional, integration, system, and &lt;b&gt;non-functional&lt;/b&gt; (performance, scalability, security) testing.&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;2) Data &amp;amp; Pipeline Testing (Databricks / Spark / Delta)&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Develop &lt;b&gt;automated tests&lt;/b&gt; for &lt;b&gt;Spark&lt;/b&gt; transformations (e.g., &lt;b&gt;PySpark&lt;/b&gt;, &lt;b&gt;Spark SQL&lt;/b&gt;) and &lt;b&gt;Delta Lake&lt;/b&gt; features (MERGE, time travel, schema evolution).&lt;/li&gt;&lt;li&gt;Implement &lt;b&gt;data quality checks&lt;/b&gt; (completeness, accuracy, consistency, referential integrity, drift detection).&lt;/li&gt;&lt;li&gt;Validate &lt;b&gt;batch and streaming&lt;/b&gt; pipelines, job dependencies, and SLAs.&lt;/li&gt;&lt;li&gt;Create &lt;b&gt;reproducible test datasets&lt;/b&gt; and use &lt;b&gt;Delta Live Tables&lt;/b&gt; (if applicable) to validate expectations.&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;3) Azure Cloud QA&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Test data flows across &lt;b&gt;Azure Data Factory / Synapse pipelines&lt;/b&gt;, Databricks Jobs, &lt;b&gt;Azure Storage/ADLS&lt;/b&gt;, &lt;b&gt;Azure SQL/SQL MI&lt;/b&gt;, &lt;b&gt;Event Hub/Kafka&lt;/b&gt;, and &lt;b&gt;Key Vault&lt;/b&gt; access patterns.&lt;/li&gt;&lt;li&gt;Verify &lt;b&gt;IAM/ACLs/Unity Catalog&lt;/b&gt; 
permissions, secrets handling, and &lt;b&gt;network boundary&lt;/b&gt; controls.&lt;/li&gt;&lt;li&gt;Validate &lt;b&gt;infrastructure configurations&lt;/b&gt; across environments (Dev/Test/Prod) and support &lt;b&gt;IaC&lt;/b&gt; validation (e.g., Bicep/Terraform plan diffs).&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;4) Automation &amp;amp; DevOps&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Integrate tests into &lt;b&gt;CI/CD&lt;/b&gt; (Azure DevOps/GitHub Actions): unit, integration, data-validation, and &lt;b&gt;post-deployment&lt;/b&gt; smoke tests.&lt;/li&gt;&lt;li&gt;Implement &lt;b&gt;quality gates&lt;/b&gt; (coverage thresholds, schema checks, contract tests, static checks like Pylint/flake8).&lt;/li&gt;&lt;li&gt;Build &lt;b&gt;test utilities&lt;/b&gt; and shared libraries for data assertions, fixtures, and synthetic data generation.&lt;/li&gt;&lt;li&gt;Use &lt;b&gt;feature branches&lt;/b&gt;, PR checks, and &lt;b&gt;merge policies&lt;/b&gt; to enforce quality.&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;5) Observability &amp;amp; Reliability&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Instrument tests and pipelines with &lt;b&gt;logging &amp;amp; metrics&lt;/b&gt; (e.g., Azure Monitor, Log Analytics, Databricks metrics).&lt;/li&gt;&lt;li&gt;Define &lt;b&gt;SLIs/SLOs&lt;/b&gt; for data quality and &lt;b&gt;pipeline reliability&lt;/b&gt;; participate in incident reviews and RCA.&lt;/li&gt;&lt;li&gt;Establish &lt;b&gt;quality dashboards&lt;/b&gt; (test pass rate, DQ incidents, recovery time, flaky test rate).&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;6) Collaboration &amp;amp; Governance&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;Partner with &lt;b&gt;data engineers, platform engineers, product owners, and data stewards&lt;/b&gt;.&lt;/li&gt;&lt;li&gt;Contribute to &lt;b&gt;test data management&lt;/b&gt; practices and a &lt;b&gt;data contract&lt;/b&gt; approach with upstream/downstream teams.&lt;/li&gt;&lt;li&gt;Document test plans, 
runbooks, and &lt;b&gt;traceability&lt;/b&gt; from requirements to tests.&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;b&gt;&lt;span&gt;Required Qualifications&lt;/span&gt;&lt;/b&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;&lt;b&gt;3-8+ years&lt;/b&gt; in QA/Test Engineering with &lt;b&gt;2+ years&lt;/b&gt; in &lt;b&gt;data platform&lt;/b&gt; or &lt;b&gt;Spark-based&lt;/b&gt; environments.&lt;/li&gt;&lt;li&gt;Hands-on with &lt;b&gt;Databricks&lt;/b&gt;: Notebooks, Jobs, Delta Lake, cluster policies.&lt;/li&gt;&lt;li&gt;Strong &lt;b&gt;Python&lt;/b&gt; for tests/utilities; solid &lt;b&gt;SQL&lt;/b&gt; for validation and profiling.&lt;/li&gt;&lt;li&gt;Experience testing &lt;b&gt;ETL/ELT&lt;/b&gt; patterns, data partitioning, late-arriving data, slowly changing dimensions, idempotency.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Azure&lt;/b&gt; experience: &lt;b&gt;Data Factory&lt;/b&gt;, &lt;b&gt;ADLS Gen2&lt;/b&gt;, &lt;b&gt;Key Vault&lt;/b&gt;, &lt;b&gt;Azure DevOps/GitHub&lt;/b&gt;.&lt;/li&gt;&lt;li&gt;CI/CD for data: pipelines, artifacts, environments, approvals, &lt;b&gt;test stages&lt;/b&gt; and &lt;b&gt;post-deploy&lt;/b&gt; validations.&lt;/li&gt;&lt;li&gt;Familiar with &lt;b&gt;schema validation &lt;/b&gt;and &lt;b&gt;API testing&lt;/b&gt; (REST/Databricks Jobs API).&lt;/li&gt;&lt;li&gt;Clear understanding of &lt;b&gt;security&lt;/b&gt; (RBAC/ABAC, managed identities), &lt;b&gt;cost &amp;amp; performance&lt;/b&gt; considerations.&lt;/li&gt;&lt;/ul&gt;&lt;span&gt;&lt;strong&gt;Nice-to-Have&lt;/strong&gt;&lt;/span&gt;&lt;ul&gt;&lt;li&gt;&lt;b&gt;Unity Catalog&lt;/b&gt;, data lineage, and governance testing.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Streaming&lt;/b&gt; (Structured Streaming, Delta Live Tables), &lt;b&gt;event-driven&lt;/b&gt; validation.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Performance testing&lt;/b&gt; of Spark jobs (cluster sizing, shuffle hotspots, skew).&lt;/li&gt;&lt;li&gt;Exposure to &lt;b&gt;privacy&lt;/b&gt; &amp;amp; &lt;b&gt;regulatory&lt;/b&gt; controls (PII masking, GDPR/CCPA, 
SOX).&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813856/qa-professional-azure-and-databricks-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813856/qa-professional-azure-and-databricks-at-infobeans/</link>
  <title>[Full Time] QA Professional - Azure and Databricks at Infobeans</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813170/software-engineer-python-developer-at-bahwan-cybertek/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;span&gt;We are looking for an experienced Python Developer with 5 to 7 years experience with strong scripting capabilities and hands-on experience in database interaction. The candidate will be responsible for developing automation scripts, data processing workflows, and backend utilities, while collaborating closely with application, data, and infrastructure teams.&lt;/span&gt;&lt;/p&gt;&lt;div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Develop, enhance, and maintain&lt;strong&gt;Python scripts&lt;/strong&gt;for automation, data processing, and system integration&lt;/li&gt;&lt;li&gt;Write&lt;strong&gt;efficient, reusable, and well-documented Python code&lt;/strong&gt;following best practices&lt;/li&gt;&lt;li&gt;Design and execute&lt;strong&gt;database queries&lt;/strong&gt;for data extraction, transformation, and validation&lt;/li&gt;&lt;li&gt;Integrate Python scripts with&lt;strong&gt;relational databases&lt;/strong&gt;for batch and near real-time processing&lt;/li&gt;&lt;li&gt;Perform&lt;strong&gt;data validation, reconciliation, and exception handling&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Optimize scripts for&lt;strong&gt;performance, scalability, and reliability&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to understand requirements and deliver solutions&lt;/li&gt;&lt;li&gt;Troubleshoot production issues and provide timely fixes&lt;/li&gt;&lt;li&gt;Ensure adherence to&lt;strong&gt;coding standards, security guidelines, and compliance requirements&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Support deployment and execution of scripts in&lt;strong&gt;Linux / Windows environments&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional 
Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Bahwan CyberTek&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813170/software-engineer-python-developer-at-bahwan-cybertek/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813170/software-engineer-python-developer-at-bahwan-cybertek/</link>
  <title>[Full Time] Software Engineer - Python Developer at Bahwan CyberTek</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813091/net-angular-professional-at-capgemini/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;strong&gt; &lt;span&gt; Job Title: &lt;/span&gt; &lt;span&gt; .Net Angular Developer &lt;/span&gt; &lt;/strong&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Experience: 6 to 9 years &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Location:Bangalore &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; We are looking for a skilled &lt;strong&gt; &lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;strong&gt; &lt;/strong&gt;&lt;/li&gt;&lt;strong&gt; &lt;/strong&gt;&lt;li&gt;&lt;strong&gt; NET Developer &lt;/strong&gt; to design, develop, and maintain &lt;strong&gt; web applications &lt;/strong&gt; using &lt;strong&gt; Microsoft .NET technologies &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; The ideal candidate will have strong programming skills in &lt;strong&gt; C# &lt;/strong&gt; , &lt;strong&gt; ASP.NET &lt;/strong&gt; , and &lt;strong&gt; JavaScript frameworks &lt;/strong&gt; like &lt;strong&gt; Angular &lt;/strong&gt; , with a solid understanding of &lt;strong&gt; web application development &lt;/strong&gt; and &lt;strong&gt; software engineering best practices &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Key Responsibilities &lt;/strong&gt; Develop and maintain &lt;strong&gt; web applications &lt;/strong&gt; using &lt;strong&gt; ASP.NET Core &lt;/strong&gt; , &lt;strong&gt; C# &lt;/strong&gt; , and &lt;strong&gt; Angular &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Implement &lt;strong&gt; RESTful APIs &lt;/strong&gt; and integrate with front-end frameworks &lt;/li&gt; &lt;li&gt; Design and optimize &lt;strong&gt; data models &lt;/strong&gt; , write efficient &lt;strong&gt; SQL queries &lt;/strong&gt; , and ensure database performance &lt;/li&gt; &lt;li&gt; Collaborate with cross-functional teams to gather &lt;strong&gt; user requirements &lt;/strong&gt; and translate them into &lt;strong&gt; functional specifications &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Ensure 
&lt;strong&gt; code quality &lt;/strong&gt; , follow &lt;strong&gt; coding conventions &lt;/strong&gt; , and participate in &lt;strong&gt; code reviews &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Write &lt;strong&gt; unit and integration tests &lt;/strong&gt; ; support &lt;strong&gt; test automation &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Maintain &lt;strong&gt; technical documentation &lt;/strong&gt; and adhere to &lt;strong&gt; software development methodologies &lt;/strong&gt; (Agile/Scrum) &lt;/li&gt; &lt;li&gt; Troubleshoot and resolve issues in development and production environments &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;strong&gt; Required Skills &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong experience in &lt;strong&gt; &lt;/strong&gt;&lt;/li&gt;&lt;strong&gt; &lt;/strong&gt;&lt;li&gt;&lt;strong&gt; .NET Framework &lt;/strong&gt; and &lt;strong&gt; &lt;/strong&gt;&lt;/li&gt;&lt;strong&gt; &lt;/strong&gt;&lt;li&gt;&lt;strong&gt; .NET Core &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Proficiency in &lt;strong&gt; C# &lt;/strong&gt; , &lt;strong&gt; ASP,NET MVC &lt;/strong&gt; , and &lt;strong&gt; Web API &lt;/strong&gt; development &lt;/li&gt; &lt;li&gt; Front-end experience with &lt;strong&gt; Angular &lt;/strong&gt; and &lt;strong&gt; JavaScript &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Solid understanding of &lt;strong&gt; object-oriented programming (OOP) &lt;/strong&gt; and &lt;strong&gt; design principles &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Knowledge of &lt;strong&gt; SQL Server &lt;/strong&gt; and relational database design &lt;/li&gt; &lt;li&gt; Familiarity with &lt;strong&gt; web application frameworks &lt;/strong&gt; and &lt;strong&gt; responsive design &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Good understanding of &lt;strong&gt; software development lifecycle &lt;/strong&gt; and &lt;strong&gt; version control &lt;/strong&gt; (Git) &lt;/li&gt; &lt;li&gt; &lt;strong&gt; Preferred Skills &lt;/strong&gt; Experience with &lt;strong&gt; Azure 
&lt;/strong&gt; or other &lt;strong&gt; cloud platforms &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Exposure to &lt;strong&gt; CI/CD pipelines &lt;/strong&gt; and &lt;strong&gt; DevOps practices &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Knowledge of &lt;strong&gt; security best practices &lt;/strong&gt; for web applications &lt;/li&gt; &lt;li&gt; Familiarity with &lt;strong&gt; unit testing frameworks &lt;/strong&gt; (xUnit, NUnit) and &lt;strong&gt; mocking tools &lt;/strong&gt; &lt;/li&gt; &lt;li&gt; Understanding of &lt;strong&gt; performance optimization &lt;/strong&gt; and &lt;strong&gt; scalability &lt;/strong&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capgemini&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813091/net-angular-professional-at-capgemini/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813091/net-angular-professional-at-capgemini/</link>
  <title>[Full Time] .Net Angular Professional at Capgemini</title>
  <dc:date>Mon, 16 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/816655/application-support-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Application Support Engineer&lt;b&gt;Project Role Description :&lt;/b&gt;Act as software detectives, provide a dynamic service identifying and solving issues within multiple components of critical business systems. &lt;b&gt;Must have skills :&lt;/b&gt;SAP Basis Administration&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time educationRole:SAP RISE Full Stack Engineer Role Overview:We are seeking SAP Basis Full Stack Engineer role to build and integrate SAP Applications, overall health, performance, and architecture of SAP system landscape. Role requires managing end-to-end SAP landscapes effectively from installation and configuration to performance optimization, upgrade, and troubleshooting. This role requires the ability to conceptualize and implement AI-driven enhancements to traditional SAP Landscapes, driving the next generation of enterprise automation and intelligence with AI-first mindset.&lt;br&gt;Years of Experience:6 to 15 Years &lt;b&gt;Key Responsibilities&lt;/b&gt;:-Design SAP landscapes and knowledge on basic network concepts for SAP workload on Hyper scaler (Azure/AWS/GCP) and On-premises and design compatibility for file system layout for SAP environment.-Hands on experience on SAP eco-system High-Availability Cluster &amp;amp; Disaster Recovery setup and troubleshooting.-While the business situation / problem is given you will have responsibility to visualize innovative solutions leveraging GenAI &amp;amp; Agentic architecture and the best of available technologies from SAP and other providers. 
-Engage in end-to-end AI-enabled project delivery:requirement gathering, solution design, implementation, testing, and business adoption.-Work with real-time AI platforms, integrating AI insights into enterprise decision-making workflows -Hands on Application lifecycle management- Automated build and validations, upgrades, patching and performance optimization of SAP S/4 HANA Suites (S/4HANA, BW/4HANA) and SAP Business suite (ECC, BW, APO, PI/PO, MII, SLT, GRC, BI/BODS, Solution manager) for On-premises, Cloud &amp;amp; SAP RISE hosting.-Good understanding and hands on of SAP BTP services administration (Integration Suite, ADS, Datasphere, BWZ, HANA Cloud, Cloud ALM, CTMS) and other SaaS Solutions like Ariba, Success factor, IBP, etc and worked on integrations with Cloud Connector and Backend SAP &amp;amp; 3rd party systems.-Hands on Experience on OS/DB migration &amp;amp; upgrades with knowledge on SAP Service Market Place, Maintenance Planner, SWPM, SUM tools. Understand and support Pre-Go live checks and post go-live checks with On-prem, Cloud &amp;amp; SAP RISE hosting.-SAP stack and platform Security good knowledge on SAP and HANA security including SSL/TLS/SSFS/LSS/KMS/SSO/SAML.-Knowhow on Cloud Advanced security for SAP (Roles, IAM, IDP, IPS, Active directory, etc.) -Knowledge on SAP supported output management systems.-Know how on Integration of friends of SAP OpenText (VIM, Archive Server), Vertex, RedWood, Control-M, Revtrack, Blackline, Seeburger, StreamServe, WinShuttle, Greenlight, etc.-Perform SAP Solution manager setups like Basic configurations, CTMS, LMDB, backbone connections, SLD, etc. -Act as an SMA (Subject Matter Advisor) for multiple teams. 
Good to Have:-Certifications:(At least one cloud and one SAP from the below list)-Microsoft Azure Fundamentals / AWS Certified Cloud Practitioner Foundational / Google Cloud Digital Leader -SAP HANA Database Administration-SAP S/4HANA Conversion and SAP System Upgrade-Solution Transformation Consultant with SAP Cloud ALM-SAP Build Work Zone Implementation and Administration-Microsoft Certified:Azure Administrator Associate / AWS Certified Solution Architect Associate-SAP S/4HANA Cloud Private EditionKey Skills and Knowledge:-Technical Expertise:Understanding and knowledge of SAP platforms and technologies, including Cloud Solutions, SAP R3, and S/4 HANA, BW/4 HANA, Administration ABAP &amp;amp; JAVA and SAP BTP (Business Technology Platform)/SAP SAAS with SAP Databases (HANA/SYBASE IQ/MAX DB).-Business Acumen:Build connect with client and align with business priorities. -Skills:Excellent communication and collaboration skills to effectively work with diverse teams, including SAP module leads and developers. -Understanding of Python, Ansible, Terraform &amp;amp; YAML scripting. -Ability to bridge technical AI concepts with business goals, ensuring timely delivery and stakeholder alignment.-Hands-on experience in GenAI (LLMs, embeddings, vector databases) and Agentic AI applications (multi-agent systems, planning, reasoning demonstrated through project implementations-Solid grasp of prompt engineering, AI evaluation frameworks, value realization in AI projects-Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.-Ability to work independently and as part of a team.-Effective communicator with ability to drive critical issues (P1/P2 or Migration) with cross-functional, Multi-vendor (SAP, 3rd parties) and client vendors.-Ability to work under pressure and manage multiple priorities effectively. 
-Familiar with ITIL concepts of Service Management, Change Management and Root Cause Analysis and using the ITIL tools like ServiceNow, Jira, MS ADO, etc.&lt;br&gt;Qualifications:-Minimum 15 years full time education with bachelor's degree in computer science or related field.Keywords for Recruitment / ATS OptimizationSAP Basis Administrator, SAP HANA Administrator, SAP Basis Administration, SAP HANA Administrator, SAP BASIS, SAP HANA, SAP HANA Database Admin, SAP BTP, SAP BTP Administrator, SAP BTP Administration, SAP Cloud Migration, SAP RISE, SAP RISE Migration, SAP RISE Implementation, SAP Cloud ALM Administrator, SAP Cloud ALM Administration, Full stack SAP BASIS Engineer, SAP BASIS FSE, SAP S/4HANA Administrator, SAP S/4HANA Administration, Agentic AI, Gen AI.&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/816655/application-support-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/816655/application-support-engineer-at-accenture/</link>
  <title>[Full Time] Application Support Engineer at Accenture</title>
  <dc:date>Fri, 13 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811402/power-bi-developer-at-crisil/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Role Summary&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;We are looking for a highly skilled Power BI professional who can design, build, and implement end-to-end dashboards that translate business questions into clear, actionable insights.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;This role requires more than technical proficiency - the ideal candidate should be excellent at understanding business requirements, structuring them into robust data models, and delivering sustainable, scalable BI solutions. A strong communication is a must.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Business users may not always articulate system-level or design-level requirements clearly. The resource must bridge the gap between Ask and Final solution&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Key Responsibilities&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Business Understanding &amp;amp; Requirement Translation&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Engage with business stakeholders to understand reporting needs, KPIs, and decision drivers.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Translate business requirements into well-structured BI solutions.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Challenge assumptions, ask the right questions, and propose better data and visualization approaches where needed.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; 
&lt;span&gt; &lt;span&gt; &lt;span&gt;Document business logic, assumptions, and metrics clearly.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Power BI Dashboard Design &amp;amp; Development&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Design and develop interactive, user-friendly, and performance-optimized dashboards using Power BI.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Build strong semantic models with reusable measures and standardized KPIs.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Create dashboards that are intuitive for non-technical business users.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Apply best practices in layout, storytelling, drill-downs, filters, and navigation.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Data Modeling &amp;amp; DAX&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Design efficient data models (star/snowflake schemas where appropriate).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Write optimized and maintainable DAX measures for complex calculations.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Ensure accuracy, consistency, and performance of calculations across reports.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Handle incremental refresh, large datasets, and performance tuning.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Data Integration 
&amp;amp; Transformation&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Connect Power BI to multiple data sources (databases, Excel, CSVs, APIs, etc.).&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Use Power Query (M) for data cleaning, transformation, and shaping.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Ensure data quality, reconciliation, and consistency across sources.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Deployment, Governance &amp;amp; Maintenance&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Manage Power BI Service workspaces, datasets, refresh schedules, and access controls.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Implement row-level security (RLS) where required.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Ensure dashboards are scalable, maintainable, and aligned with governance standards.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Provide ongoing support, enhancements, and performance improvements.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Collaboration &amp;amp; Communication&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Work closely with business teams, data teams, and IT as required.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Explain dashboards, metrics, and logic in simple, business-friendly language.&lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Train users on dashboard usage and interpretation where needed.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Required Skills &amp;amp; Experience&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Must-Have &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Proven hands-on expertise in Power BI (Power BI Desktop + Power BI Service) &lt;b&gt;list all projects undertaken and deployed&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Minimum experience of least &lt;b&gt;5+ years as a stand-alone contributor&lt;/b&gt; on Power BI solution; &lt;b&gt;over all experience of 9+ years&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Strong experience in data modeling and DAX.&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Strong command of Power Query / M language.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Experience building dashboards from scratch, not just modifying existing ones.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Ability to handle poorly defined requirements and convert them into structured solutions.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Strong analytical thinking and problem-solving skills.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Excellent communication skills with non-technical stakeholders.&lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Good to Have&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Experience working with large datasets and performance optimization.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Exposure to SQL and relational databases.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Understanding of BI best practices, KPI frameworks, and reporting governance.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Prior experience in consulting, analytics, or business-facing BI roles.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Experience in operational, or management reporting is a plus.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;What Success Looks Like in This Role&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Dashboards are trusted by business users and actively used for decision-making.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Business users have a sense of delight, confidence and ease when using the tool&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Minimal rework due to clear requirement understanding upfront.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Scalable and reusable data models rather than one-off reports.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Business users feel 
understood even when they can't articulate requirements perfectly.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Clean, documented, and maintainable Power BI assets.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt;Ideal Candidate Profile&lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Thinks like an analyst, designs like an architect, and executes like a developer.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Comfortable saying this can be done better and backing it with logic.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Detail-oriented, structured, and obsessed with clarity.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Enjoys converting chaos into clean dashboards.&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Crisil&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811402/power-bi-developer-at-crisil/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811402/power-bi-developer-at-crisil/</link>
  <title>[Full Time] Power BI Developer at Crisil</title>
  <dc:date>Fri, 13 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811349/gen-ai-architect-at-capgemini/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt; Your Role &lt;/b&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;span&gt; Total Experience: 14-16 Years &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Location: Mumbai/Pune/Bangalore &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Architect end-to-end GenAI solutions &lt;/strong&gt; for mediumlarge accounts across complex tech landscapes (J2EE/.NET/ERP). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Define GenAI architecture &lt;/strong&gt; using Azure OpenAI / AWS Bedrock / GCP services. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Lead design decisions &lt;/strong&gt; evaluate options, perform cost/ROI analysis, and act as design authority. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Drive innovation through POCs/pilots &lt;/strong&gt; , explore new technologies, and guide delivery teams. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Provide strategic architectural recommendations &lt;/strong&gt; with strong stakeholder communication &amp;amp; presentation skills. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt; Your Profile &lt;/b&gt; &lt;/b&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Python + AI/ML stack &lt;/strong&gt; (pandas, numpy, scikit-learn) + app deployment (Flask/FastAPI/Streamlit). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Hands-on with GenAI concepts &lt;/strong&gt; prompt engineering, RAG, LangChain, VectorDB. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Cloud expertise &lt;/strong&gt; Azure/AWS/GCP for storage, IAM/Key Vaults, deployments. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; GenAI application deployment &lt;/strong&gt; experience on at least one hyperscaler. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;strong&gt; Database knowledge &lt;/strong&gt; PostgreSQL, SQL queries, and basic DB design. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capgemini&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811349/gen-ai-architect-at-capgemini/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811349/gen-ai-architect-at-capgemini/</link>
  <title>[Full Time] Gen AI Architect at Capgemini</title>
  <dc:date>Fri, 13 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811516/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Data Engineering&lt;b&gt;Good to have skills :&lt;/b&gt;Apache Spark, Bigdata Analytics Architecture and Design, Databricks Unified Data Analytics Platform&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require you to engage in discussions about data strategy and provide insights that drive effective decision-making across the organization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be a Subject Matter Expert with deep knowledge and experience.&lt;/li&gt;&lt;li&gt;Should have influencing and advisory skills.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate workshops and discussions to gather requirements and feedback from stakeholders.&lt;/li&gt;&lt;li&gt;Continuously evaluate and improve data architecture practices to enhance efficiency and effectiveness. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Data Engineering.&lt;b&gt;Good To Have Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Apache Spark, Bigdata Analytics Architecture and Design, Databricks Unified Data Analytics Platform.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 15 years of experience in Data Engineering.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811516/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811516/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811529/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Data Engineering&lt;b&gt;Good to have skills :&lt;/b&gt;Apache Spark, Bigdata Analytics Architecture and Design, Databricks Unified Data Analytics Platform&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt; 15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require you to engage in discussions about data strategy and provide insights that drive effective decision-making across the organization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be a Subject Matter Expert with deep knowledge and experience.&lt;/li&gt;&lt;li&gt;Should have influencing and advisory skills.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate workshops and discussions to gather requirements and feedback from stakeholders.&lt;/li&gt;&lt;li&gt;Continuously evaluate and improve data architecture practices to enhance efficiency and effectiveness. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;&lt;/li&gt;&lt;li&gt; Proficiency in Data Engineering.&lt;b&gt;Good To Have Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Apache Spark, Bigdata Analytics Architecture and Design, Databricks Unified Data Analytics Platform.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 15 years of experience in Data Engineering.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811529/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811529/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811514/senior-mainframe-ims-db2-database-administrator-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; We are &lt;strong&gt;CirrusLabs&lt;/strong&gt; . Our vision is to become the world&apos;s most sought-after niche digital transformation company that helps customers realize value through innovation. Our mission is to co-create success with our customers, partners and community. Our goal is to enable employees to dream, grow and make things happen. We are committed to excellence. We are a dependable partner organization that delivers on commitments. We strive to maintain integrity with our employees and customers. Every action we take is driven by value. The core of who we are is through our well-knit teams and employees. You are the core of a values driven organization.&lt;br&gt;&lt;/p&gt;&lt;p&gt;You have an entrepreneurial spirit. You enjoy working as a part of well-knit teams. You value the team over the individual. You welcome diversity at work and within the greater community. You aren&apos;t afraid to take risks. You appreciate a growth path with your leadership team that journeys how you can grow inside and outside of the organization. You thrive upon continuing education programs that your company sponsors to strengthen your skills and for you to become a thought leader ahead of the industry curve.&lt;br&gt;&lt;/p&gt;&lt;p&gt;You are excited about creating change because your skills can help the greater good of every customer, industry and community. We are hiring a talented &lt;strong&gt;Senior Mainframe IMS &amp;amp; Db2 Database Administrator&lt;/strong&gt; to join our team. 
If you&apos;re excited to be part of a winning team, CirrusLabs ( &lt;strong&gt;&lt;u&gt;http://www.cirruslabs.io&lt;/u&gt;&lt;/strong&gt; ) is a great place to grow your career.&lt;br&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience - 10+ years&lt;/strong&gt;&lt;br&gt;&lt;strong&gt;Location - Bengaluru, Hyderabad, Chennai, Mumbai, Pune, Kolkata and Gurugram.&lt;/strong&gt;&lt;br&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Job Summary&lt;/strong&gt;&lt;br&gt;We are seeking a &lt;strong&gt;Senior Mainframe IMS &amp;amp; Db2 Database Administrator&lt;/strong&gt; to lead the engineering, maintenance, and optimization of our core banking/insurance systems. You will be responsible for the health of high-volume &lt;strong&gt;Db2 for z/OS&lt;/strong&gt; and &lt;strong&gt;IMS DB/DC&lt;/strong&gt; environments, ensuring sub-second response times for millions of daily transactions while leading modernization efforts like &lt;strong&gt;Zowe&lt;/strong&gt; adoption and &lt;strong&gt;Db2 AI&lt;/strong&gt; integration.&lt;br&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;br&gt;&lt;strong&gt;Db2 for z/OS Administration&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;System Tuning:&lt;/strong&gt; Perform deep-level subsystem tuning, buffer pool optimization, and ZPARM management for &lt;strong&gt;Db2 13 (or 14)&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Query Performance:&lt;/strong&gt; Use &lt;strong&gt;IBM Data Studio&lt;/strong&gt; or &lt;strong&gt;Query Tuner&lt;/strong&gt; to analyze complex SQL and implement advanced indexing, MQTs, and partitioning.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Utility Management:&lt;/strong&gt; Execute and automate high-speed utilities (LOAD, REORG, RUNSTATS, UNLOAD) using &lt;strong&gt;IBM or BMC toolsets&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Modernization:&lt;/strong&gt; Implement &lt;strong&gt;Db2 AI for z/OS (Db2ZAI)&lt;/strong&gt; for autonomous performance tuning and leverage &lt;strong&gt;Restful APIs&lt;/strong&gt; 
for Db2 data access.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;IMS DB/DC Administration&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Database Design:&lt;/strong&gt; Manage &lt;strong&gt;IMS Full Function&lt;/strong&gt; and &lt;strong&gt;Fast Path (DEDB)&lt;/strong&gt; databases. Lead the conversion of DBDs and PSBs to modern formats where applicable.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Transaction Management:&lt;/strong&gt; Monitor and tune &lt;strong&gt;IMS DC (Data Communications)&lt;/strong&gt; regions, message queues, and transaction definitions.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;HALDB Management:&lt;/strong&gt; Design and maintain &lt;strong&gt;High Availability Large Databases (HALDB)&lt;/strong&gt;, including partition management and online reorganization.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Sharing:&lt;/strong&gt; Configure and troubleshoot &lt;strong&gt;IMS and Db2 Sysplex Data Sharing&lt;/strong&gt; to ensure continuous availability.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Operations &amp;amp; Security&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Disaster Recovery:&lt;/strong&gt; Lead &lt;strong&gt;Backup &amp;amp; Recovery&lt;/strong&gt; strategies using &lt;strong&gt;GDPS&lt;/strong&gt;, FlashCopy, and standard recovery utilities to meet zero-data-loss RPOs.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Security &amp;amp; Compliance:&lt;/strong&gt; Enforce &lt;strong&gt;RACF/ACF2&lt;/strong&gt; security protocols and implement database encryption (per Pervasive Encryption standards).&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Automation:&lt;/strong&gt; Develop and maintain &lt;strong&gt;JCL&lt;/strong&gt;, &lt;strong&gt;REXX&lt;/strong&gt;, and &lt;strong&gt;Python&lt;/strong&gt; scripts for automated health checks and space management.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Required Qualifications&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Experience:&lt;/strong&gt; 10+ years as a Mainframe DBA, with at least 5 years 
managing dual &lt;strong&gt;IMS&lt;/strong&gt; and &lt;strong&gt;Db2&lt;/strong&gt; environments.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Technical Skills:&lt;/strong&gt;&lt;/li&gt;&lt;ul&gt;&lt;li&gt;Expertise in &lt;strong&gt;z/OS&lt;/strong&gt; environment, &lt;strong&gt;TSO/ISPF&lt;/strong&gt;, &lt;strong&gt;JCL&lt;/strong&gt;, and &lt;strong&gt;SMP/E&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;Deep understanding of &lt;strong&gt;Coupling Facility&lt;/strong&gt; and &lt;strong&gt;Parallel Sysplex&lt;/strong&gt; architecture.&lt;/li&gt;&lt;li&gt;Proficiency with third-party tools (e.g., &lt;strong&gt;CA/Broadcom&lt;/strong&gt;, &lt;strong&gt;BMC&lt;/strong&gt;, or &lt;strong&gt;IBM Db2 Tools&lt;/strong&gt;).&lt;/li&gt;&lt;/ul&gt;&lt;li&gt;&lt;strong&gt;Education:&lt;/strong&gt; Bachelor&apos;s degree in Computer Science or a related field.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred Skills (2026 Context)&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;DevOps for z/OS:&lt;/strong&gt; Experience with &lt;strong&gt;Zowe&lt;/strong&gt;, &lt;strong&gt;Ansible for IBM Z&lt;/strong&gt;, or &lt;strong&gt;IBM DB2 DevOps Experience&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Hybrid Cloud:&lt;/strong&gt; Familiarity with &lt;strong&gt;IBM Cloud Pak for Data&lt;/strong&gt; or replicating mainframe data to cloud targets (AWS/Azure) using &lt;strong&gt;IDR (InfoSphere Data Replication)&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Application Knowledge:&lt;/strong&gt; Familiarity with &lt;strong&gt;COBOL&lt;/strong&gt;, &lt;strong&gt;PL/I&lt;/strong&gt;, or &lt;strong&gt;Java on z/OS&lt;/strong&gt;.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Soft Skills&lt;/strong&gt;&lt;br&gt;&lt;strong&gt;Incident Leadership:&lt;/strong&gt; Ability to lead &quot;War Room &quot; calls for high-severity production outages. 
&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811514/senior-mainframe-ims-db2-database-administrator-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811514/senior-mainframe-ims-db2-database-administrator-at-cirruslabs/</link>
  <title>[Full Time] Senior Mainframe IMS &amp; Db2 Database Administrator at Cirruslabs</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811599/cloud-iaas-architecture-design-oracle-cloud-expert-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; &lt;span&gt; &lt;span&gt;Experience with any of the following:&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Oracle Experience&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Installing, configuring, and maintaining Oracle databases, as well as developing and implementing backup and recovery strategies, optimizing performance, and resolving database issues. They are also responsible for security policies, data migration, and often contribute to infrastructure planning and performance testing.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Core Responsibilities:&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Database Installation and Configuration: Setting up and configuring Oracle databases according to specific requirements.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Backup and Recovery: Implementing and maintaining robust backup and recovery procedures to ensure data integrity and availability.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Performance Tuning: Optimizing database performance through tuning, indexing, and other techniques.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Troubleshooting and Issue Resolution: Identifying and resolving database-related issues, including performance bottlenecks and security vulnerabilities.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Security Management: Implementing and managing database security policies and procedures.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Understanding of Linux operating systems: Knowledge in Linux commands, file systems, and system administration tools.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Scripting skills: Ability to write scripts in languages like Bash or 
Python to automate tasks.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Troubleshooting skills: Ability to diagnose and resolve system issues.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Knowledge on Git commands&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Secondary Important Skill&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft SQL Server, Microsoft Transact-SQL;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft SQL Server Management Studio,;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft Windows Server 2003/2008/2012;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; SQL Server Database Administration 2008 R2 and 2012 and or 2016 and or 2017&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Cloud Experience&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; AWS RDS and EC2 Database administration.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; AVD Azure Database administration.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Experience in&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; 
&lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; High Availability ( Always on and Mirroring and Replication ),&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Database Migration,&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Database Monitoring&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Performance tuning and optimization, troubleshooting problems&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Manage back-ups and recovery procedures, troubleshooting problems&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Assist developers with query tuning and query /schema refinement&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Writing T-SQL Basic query like Select, Insert, Update , Delete statements and basic information about Joins.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Familiar with SSIS, Can able to execute SSIS package.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Able to multitask, prioritize, and manage time efficiently&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Accurate and precise attention to 
detail&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Strong written and verbal communication skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Excellent analytical, quantitative, and organizational skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Sense of ownership and pride in your performance and its impact on company s success&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Critical thinker and problem-solving skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Understanding of Linux operating systems: Knowledge in Linux commands, file systems, and system administration tools.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Scripting skills: Ability to write scripts in languages like Bash or Python to automate tasks.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Troubleshooting skills: Ability to diagnose and resolve system issues.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Knowledge on Git commands&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Secondary Important Skill&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft SQL Server, Microsoft Transact-SQL;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft SQL Server Management 
Studio,;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Microsoft Windows Server 2003/2008/2012;&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; SQL Server Database Administration 2008 R2 and 2012 and or 2016 and or 2017&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Cloud Experience&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; AWS RDS and EC2 Database administration.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; AVD Azure Database administration.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Experience in&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; High Availability ( Always on and Mirroring and Replication ),&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Database Migration,&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Database Monitoring&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; Performance tuning and optimization, troubleshooting problems&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; 
&lt;p&gt; &lt;span&gt; &lt;span&gt; Manage back-ups and recovery procedures, troubleshooting problems&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Assist developers with query tuning and query /schema refinement&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Writing T-SQL Basic query like Select, Insert, Update , Delete statements and basic information about Joins.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Familiar with SSIS, Can able to execute SSIS package.&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Able to multitask, prioritize, and manage time efficiently&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Accurate and precise attention to detail&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Strong written and verbal communication skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Excellent analytical, quantitative, and organizational skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Sense of ownership and pride in your performance and its impact on company s success&lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt;Critical thinker and problem-solving 
skills&lt;/span&gt; &lt;/span&gt; &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811599/cloud-iaas-architecture-design-oracle-cloud-expert-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811599/cloud-iaas-architecture-design-oracle-cloud-expert-at-zensar/</link>
  <title>[Full Time] Cloud IaaS Architecture &amp; Design Oracle Cloud Expert at Zensar</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811883/etl-aws-glue-senior-engineer-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Build and maintain data architectures pipelines for the transfer and processing of durable, complete and consistent data.&lt;/li&gt;&lt;li&gt;Design and implementation of data warehouse and data lakes that manage the appropriate data volumes&lt;/li&gt;&lt;li&gt;Adhere to the required security measures.&lt;/li&gt;&lt;li&gt;Development of processing and analysis algorithms fit for the intended data complexity and volumes.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;5+ years of hands-on strong experience in &lt;strong&gt;ETL , Glue, Python, AWS services such as S3, Lambda, CloudWatch, and IAM&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Hands-on experience in creating, optimizing, and monitoring &lt;strong&gt;Glue jobs and workflows&lt;/strong&gt; is essential&lt;/li&gt;&lt;li&gt;Very good knowledge of &lt;strong&gt;Datawarehouse, ETL concepts, Unix commands and shell/bash script&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Proficient in &lt;strong&gt;SQL&lt;/strong&gt;, data modeling, and performance tuning&lt;/li&gt;&lt;li&gt;Strong knowledge of&lt;strong&gt; Oracle database, SQL queries, performance tuning of queries&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Experience with Agile (Scrum) methodology and software development approach&lt;/li&gt;&lt;li&gt;Must come financial services/capital markets/investment banking&lt;/li&gt;&lt;li&gt;Great interpersonal and communication skills&lt;/li&gt;&lt;li&gt;Can work independently with minimum supervision&lt;/li&gt;&lt;li&gt;Good Communication and analytical skills&lt;/li&gt;&lt;li&gt;Flexible and adaptable working style to work with multiple stakeholders&lt;/li&gt;&lt;li&gt;Open to work in UK Shift (10 AM to 9 PM IST with 8 hours of productive 
work)&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Desirable&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Prior experience of working with &lt;strong&gt;Pyspark&lt;/strong&gt; would be an added advantage.&lt;/li&gt;&lt;li&gt;Financial domain knowledge withexperience in handling high data volume business cases&lt;/li&gt;&lt;li&gt;Certification in Banking domain&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Education Qualification: &lt;/strong&gt;B.Tech or MCA&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;&lt;b&gt;Mandatory Competencies&lt;/b&gt;&lt;/b&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;ETL - ETL - AWS Glue&lt;/div&gt;&lt;div&gt;Cloud - AWS - AWS S3, S3 glacier, AWS EBS&lt;/div&gt;&lt;div&gt;Cloud - AWS - AWS Lambda,AWS EventBridge, AWS Fargate&lt;/div&gt;&lt;div&gt;Cloud - AWS - Amazon CloudWatch&lt;/div&gt;&lt;div&gt;Cloud - AWS - Amazon IAM, AWS Secrets Manager, AWS KMS, AWS Cognito&lt;/div&gt;&lt;div&gt;Operating System - Operating System - Unix&lt;/div&gt;&lt;div&gt;DevOps/Configuration Mgmt - DevOps/Configuration Mgmt - Basic Bash/Shell script writing&lt;/div&gt;&lt;div&gt;Database - Oracle - PL/SQL Packages&lt;/div&gt;&lt;div&gt;Database - Database Programming - SQL&lt;/div&gt;&lt;div&gt;Agile - Agile - SCRUM&lt;/div&gt;&lt;div&gt;Programming Language - Python - OOPS Concepts&lt;/div&gt;&lt;div&gt;Beh - Communication and collaboration&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Perks and Benefits for Irisians&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;li&gt;Iris provides world-class benefits for a personalized employee experience. These benefits are designed to support financial, health and well-being needs of Irisians for a holistic professional and personal growth. 
Click to view the benefits.&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811883/etl-aws-glue-senior-engineer-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811883/etl-aws-glue-senior-engineer-at-iris-software/</link>
  <title>[Full Time] ETL AWS Glue - Senior Engineer at Iris Software</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813130/warehouse-logistics-assistant-male-day-shift-sal-upto-3-lpa-at-trigent/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Greetings from Trigent Software!!&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Hiring for Warehouse &amp;amp; Logistics Assistant Associate&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Role: Warehouse &amp;amp; Logistics Assistant (Only male)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mode of Work: Work from Office&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location: Nanakramguda &amp;amp; Gachibowli&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Working Days: 6 days&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift: Day Shift&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Qualification: Intermediate&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Salary: Upto 3 LPA&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Responsibilities:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Lifting and moving &lt;strong&gt;IT items&lt;/strong&gt; such as laptops, monitors, CPUs, keyboards, cables, etc.&lt;/li&gt;&lt;li&gt;Shifting IT equipment &lt;strong&gt;within office buildings&lt;/strong&gt; (floor-to-floor / room-to-room)&lt;/li&gt;&lt;li&gt;Helping with &lt;strong&gt;desk setup and desk removal&lt;/strong&gt;&lt;/li&gt;&lt;ul&gt;&lt;li&gt;Placing monitors, laptops, docking stations, accessories&lt;/li&gt;&lt;/ul&gt;&lt;li&gt;Packing and unpacking IT items during office moves or refresh activities&lt;/li&gt;&lt;li&gt;Collecting IT items from office floors and placing them in &lt;strong&gt;designated IT rooms&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Supporting IT team during &lt;strong&gt;deployment, replacement, and collection&lt;/strong&gt; of equipment&lt;/li&gt;&lt;li&gt;Carrying and handling &lt;strong&gt;basic documents&lt;/strong&gt; like gate pass and delivery slips (as guided)&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Important Notes:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;No 
technical or computer knowledge required&lt;/li&gt;&lt;li&gt;All work will be done &lt;strong&gt;under supervision&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Uniform and ID badge must be worn at work&lt;/li&gt;&lt;li&gt;Must follow office security and conduct rules&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Rounds of Interview: Screening round &amp;gt; Manager Round &amp;gt; HR Round&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Preferred Candidates Profiles:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* Candidate should be comfortable to perform manual handling and lifting of IT equipment.&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* Candidate should be comfortable&lt;/strong&gt; &lt;strong&gt;to work in warehouse and corporate office environments.&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Perks and Benefits:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* 2 - way cab facility&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* Food facility&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Interested candidates can contact&lt;/strong&gt; &lt;strong&gt;HR Reena at @8072181834&lt;/strong&gt; &lt;strong&gt;or can share their resumes to&lt;/strong&gt; &lt;strong&gt;reena_s@trigent.com&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Regards,&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;HR Reena&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Trigent Software&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;8072181834&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;reena_s@trigent.com&lt;/strong&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;BPM / BPO&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;DBA / Data warehousing - Other&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full 
time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Trigent&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813130/warehouse-logistics-assistant-male-day-shift-sal-upto-3-lpa-at-trigent/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813130/warehouse-logistics-assistant-male-day-shift-sal-upto-3-lpa-at-trigent/</link>
  <title>[Full Time] Warehouse &amp; Logistics Assistant (male) - Day Shift - Sal upto 3 LPA at Trigent</title>
  <dc:date>2026-02-13T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811608/scrum-master-at-people-tech/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; Hello, &lt;/p&gt;&lt;br&gt;&lt;p&gt;We are looking for an experienced &lt;strong&gt;Scrum Master&lt;/strong&gt; to drive Agile delivery for enterprise-level programs in a secure, compliance-driven environment, with strong exposure to &lt;strong&gt;AWS cloud-based data and application projects&lt;/strong&gt;.&lt;/p&gt;&lt;br&gt;&lt;p&gt;Experience: 3+ Years&lt;/p&gt;&lt;p&gt;Location: Bangalore&lt;/p&gt;&lt;br&gt;&lt;p&gt; &lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;br&gt;  Facilitate Scrum ceremonies &amp;amp; ensure Agile best practices&lt;br&gt;  Drive delivery of &lt;strong&gt;AWS cloud data &amp;amp; application initiatives&lt;/strong&gt;&lt;br&gt;  Remove impediments &amp;amp; enable high-performing cross-functional teams&lt;br&gt;  Partner with Product Owners on backlog refinement &amp;amp; sprint planning&lt;br&gt;  Track sprint metrics, velocity &amp;amp; delivery milestones&lt;br&gt;  Ensure alignment with compliance &amp;amp; security standards&lt;/p&gt;&lt;p&gt; &lt;strong&gt;Required Skills&lt;/strong&gt;&lt;br&gt;  Strong experience in &lt;strong&gt;Scrum / Agile / SAFe environments&lt;/strong&gt;&lt;br&gt;  Proven experience working on &lt;strong&gt;AWS cloud data platforms &amp;amp; application development projects&lt;/strong&gt;&lt;br&gt;  Exposure to &lt;strong&gt;ETL, data engineering, or data platform initiatives&lt;/strong&gt;&lt;br&gt;  Expertise with &lt;strong&gt;JIRA / Azure DevOps / Confluence&lt;/strong&gt;&lt;br&gt;  Strong stakeholder &amp;amp; cross-functional coordination skills&lt;/p&gt;&lt;p&gt; &lt;strong&gt;Good to Have&lt;/strong&gt;&lt;br&gt;  Experience in &lt;strong&gt;Government Cloud environments&lt;/strong&gt;&lt;br&gt;  Knowledge of &lt;strong&gt;ITAR compliance &amp;amp; secure environments&lt;/strong&gt;&lt;br&gt;  Familiarity with &lt;strong&gt;DevOps, CI/CD pipelines&lt;/strong&gt;&lt;br&gt;  Exposure to &lt;strong&gt;Palantir &amp;amp; full-stack 
teams&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please share your email at Smrity.rani@peopletech.com&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Thanks &amp;amp; Regards,&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Smrity&lt;/strong&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;People Tech&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811608/scrum-master-at-people-tech/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811608/scrum-master-at-people-tech/</link>
  <title>[Full Time] Scrum Master at People Tech</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811451/senior-mongodb-administrator-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt; We are &lt;strong&gt;CirrusLabs&lt;/strong&gt; . Our vision is to become the world&apos;s most sought-after niche digital transformation company that helps customers realize value through innovation. Our mission is to co-create success with our customers, partners and community. Our goal is to enable employees to dream, grow and make things happen. We are committed to excellence. We are a dependable partner organization that delivers on commitments. We strive to maintain integrity with our employees and customers. Every action we take is driven by value. The core of who we are is through our well-knit teams and employees. You are the core of a values driven organization.&lt;br&gt;&lt;/p&gt;&lt;p&gt;You have an entrepreneurial spirit. You enjoy working as a part of well-knit teams. You value the team over the individual. You welcome diversity at work and within the greater community. You aren&apos;t afraid to take risks. You appreciate a growth path with your leadership team that journeys how you can grow inside and outside of the organization. You thrive upon continuing education programs that your company sponsors to strengthen your skills and for you to become a thought leader ahead of the industry curve.&lt;br&gt;&lt;/p&gt;&lt;p&gt;You are excited about creating change because your skills can help the greater good of every customer, industry and community. We are hiring a talented &lt;strong&gt;Senior MongoDB Database Administrator&lt;/strong&gt; &lt;strong&gt;(DBA)&lt;/strong&gt;to join our team. 
If you&apos;re excited to be part of a winning team, CirrusLabs ( &lt;strong&gt;&lt;u&gt;http://www.cirruslabs.io&lt;/u&gt;&lt;/strong&gt; ) is a great place to grow your career.&lt;br&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience - 6-10 years&lt;/strong&gt;&lt;br&gt;&lt;strong&gt;Location - Bengaluru, Hyderabad, Chennai, Mumbai, Pune, Kolkata and Gurugram.&lt;/strong&gt;&lt;br&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Role Summary&lt;/strong&gt;&lt;br&gt;We are seeking a &lt;strong&gt;Senior MongoDB Database Administrator&lt;/strong&gt; to lead the architecture, scalability, and security of our high-traffic NoSQL database infrastructure. You will be the technical lead for MongoDB deployments across on-premises and cloud environments (AWS/Azure/GCP), ensuring 99.99% availability and industry-leading performance.&lt;br&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Architecture &amp;amp; Scaling:&lt;/strong&gt; Design and implement highly available &lt;strong&gt;Replica Sets&lt;/strong&gt; and &lt;strong&gt;Sharded Clusters&lt;/strong&gt;. Lead horizontal scaling initiatives by selecting optimal shard keys and managing chunk distribution.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Performance Optimization:&lt;/strong&gt; Conduct deep-dive query profiling and optimize the &lt;strong&gt;Aggregation Framework&lt;/strong&gt; pipelines. 
Manage advanced indexing strategies, including compound, multikey, and TTL indexes.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Cloud &amp;amp; Atlas Administration:&lt;/strong&gt; Manage &lt;strong&gt;MongoDB Atlas&lt;/strong&gt; environments, including cluster tiering, auto-scaling configuration, and serverless instance management.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Security &amp;amp; Compliance:&lt;/strong&gt; Implement &lt;strong&gt;Role-Based Access Control (RBAC)&lt;/strong&gt;, Field-Level Client-Side Encryption, and auditing to meet GDPR or SOC2 standards.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Automation (DBaaS):&lt;/strong&gt; Develop &quot;Infrastructure as Code &quot; (IaC) using &lt;strong&gt;Terraform or Ansible&lt;/strong&gt; to automate database provisioning and upgrades.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Backup &amp;amp; Disaster Recovery:&lt;/strong&gt; Design robust DR strategies with sub-minute Recovery Time Objectives (RTO) using Atlas Backup or &lt;strong&gt;Ops Manager&lt;/strong&gt; snapshots.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Collaboration:&lt;/strong&gt; Act as a Subject Matter Expert (SME) for development teams to provide guidance on &lt;strong&gt;schema design&lt;/strong&gt; (embedding vs. 
linking) and data modeling.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Required Qualifications&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Experience:&lt;/strong&gt; 6-10+ years of professional DBA experience, with at least 4-5 years dedicated to MongoDB in a large-scale production environment.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Technical Skills:&lt;/strong&gt;&lt;/li&gt;&lt;ul&gt;&lt;li&gt;Expertise in &lt;strong&gt;Linux/Unix administration&lt;/strong&gt; and shell scripting (Python/Bash).&lt;/li&gt;&lt;li&gt;Deep understanding of &lt;strong&gt;WiredTiger&lt;/strong&gt; storage engine internals.&lt;/li&gt;&lt;li&gt;Experience with monitoring tools like &lt;strong&gt;Prometheus, Grafana&lt;/strong&gt;, or MongoDB Cloud Manager.&lt;/li&gt;&lt;/ul&gt;&lt;li&gt;&lt;strong&gt;Education:&lt;/strong&gt; Bachelor&apos;s or Master&apos;s degree in Computer Science or a related technical field.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred Certifications&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;MongoDB Certified Associate DBA&lt;/strong&gt; (2026 Version).&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Associate Atlas Administrator&lt;/strong&gt;.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Cloud Platform Certifications&lt;/strong&gt; (e.g., AWS Certified Database - Specialty or Azure Database Administrator).&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Desired Soft Skills&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Problem-Solving:&lt;/strong&gt; Proven ability to resolve complex production incidents under 24/7 high-pressure rotations.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Mentorship:&lt;/strong&gt; Experience mentoring junior DBAs and leading technical cross-functional projects.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data 
warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811451/senior-mongodb-administrator-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811451/senior-mongodb-administrator-at-cirruslabs/</link>
  <title>[Full Time] Senior MongoDB Administrator at Cirruslabs</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811368/assoc-architect-it-data-architecture-at-baxter/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; Designs develop, automates, and support complex applications to extract, transform, and load data. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Ensures data quality. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Develops logical and physical data flow models for ETL applications. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Leads the design of the logical data model and implements the physical database structure and constructs and implements operational data stores and data marts. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Collaborate with other IT specialists to rapidly develop and deliver solutions that meet changing business needs &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Work with data owners to document data mappings and transformations to support effective downstream analytics and aler &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; Make recommendations and advise on data refresh, optimization of data, data storage, and data integration &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Attend various meetings as Subject Matter Expert for ETL &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Technology skillset: Oracle, DataStage, Snowflake, AWS, Python &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Pharmaceutical &amp;amp; Life Sciences&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Baxter&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811368/assoc-architect-it-data-architecture-at-baxter/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811368/assoc-architect-it-data-architecture-at-baxter/</link>
  <title>[Full Time] Assoc Architect, IT Data Architecture at Baxter</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811363/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811363/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811363/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811355/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will be pivotal in establishing a robust data framework that supports the organization&apos;&apos;s data strategy and enhances data accessibility and usability across different platforms. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811355/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811355/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811331/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt; About The Role &lt;/b&gt; &lt;br&gt; &lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt; Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with business objectives and supports efficient data management practices. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and meets the needs of the organization. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing and mentoring within the team to enhance overall team performance.- Evaluate and recommend new technologies and tools to improve data architecture and processes. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt; Must To Have &lt;b&gt;Skills:&lt;/b&gt; &lt;li&gt;Proficiency in Microsoft Azure Data Services.- Experience with data modeling and database design.- Strong understanding of data integration techniques and ETL processes.- Familiarity with cloud-based data storage solutions and architectures.- Ability to analyze and optimize data workflows for performance and efficiency. Additional Information:- The candidate should have minimum 5 years of experience in Microsoft Azure Data Services.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt; Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811331/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811331/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811329/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will be pivotal in establishing a robust data framework that supports the organization&apos;&apos;s data strategy and enhances data accessibility and usability across different platforms. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 5-8 years of IT experience and around 2+ years Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811329/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811329/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811557/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 5-8 years of IT experience and around 2+ years Data Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811557/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811557/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811474/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require a blend of analytical thinking and creative problem-solving to develop efficient data solutions that support the overall goals of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811474/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811474/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811556/senior-snowflake-azure-sme-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; &lt;span&gt; This role focuses on architecting and developing scalable solutions in Snowflake and Power BI, with an emphasis on data modeling, transformation orchestration, and analytics. The ideal candidate is proactive, self-driven, and able to shape the BI landscape for manufacturing and financial data domains. &lt;br&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Main Responsibilities &lt;br&gt; &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Design and implement star schemas and data models for Snowflake, tailored to manufacturing KPIs, maintenance, safety, and logistics datasets. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Write transformation code across the Snowflake medallion architecture using DBT (raw, transformed, reporting layers). &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Own master data management for fact and dimension tables in Snowflake. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Develop and automate KPI calculations leveraging Snowflake native tooling (Dynamic Tables, Tasks, Streams). &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Build, optimize, and publish Power BI datasets and dashboards, enabling efficient connectivity and semantic modeling from Snowflake. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Lead workshops and knowledge-sharing sessions on Snowflake and Power BI best practices for Lindt s BI teams. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Help drive the adoption and scaling of Snowflake and Power BI, influencing ways of working locally and globally. 
&lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Use Azure DevOps, Azure Repos, Snowflake Schemachange, and Streamlit in daily technical routines. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Collaborate effectively; proactively propose process and architectural improvements. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Manufacturing process knowledge is a plus, not a must. ETL tool ownership (HighByte, Lobster Data) is covered by other team members. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Technical Skills Priority &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Advanced proficiency in Snowflake, DBT, SQL, Power BI (DAX, modeling, gateway configuration), Azure DevOps, Azure Repo, Streamlit, and Schemachange. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Experience designing data warehouses, star schema, and handling master data management. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Power BI dashboard development, dataset optimization, and best practice enablement. &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Excellent communication and workshop facilitation skills. 
&lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811556/senior-snowflake-azure-sme-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811556/senior-snowflake-azure-sme-at-dxc-technology/</link>
  <title>[Full Time] Senior Snowflake Azure SME at DXC Technology</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811540/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811540/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811540/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811577/power-bi-developer-at-crisil/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Role Summary&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;We are looking for a highly skilled Power BI professional who can design, build, and implement end-to-end dashboards that translate business questions into clear, actionable insights.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;This role requires more than technical proficiency - the ideal candidate should be excellent at understanding business requirements, structuring them into robust data models, and delivering sustainable, scalable BI solutions. A strong communication is a must.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Business users may not always articulate system-level or design-level requirements clearly. The resource must bridge the gap between Ask and Final solution&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Key Responsibilities&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Business Understanding &amp;amp; Requirement Translation&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Engage with business stakeholders to understand reporting needs, KPIs, and decision drivers.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Translate business requirements into well-structured BI solutions.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Challenge assumptions, ask the right questions, and propose better data and visualization approaches where needed.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Document business logic, 
assumptions, and metrics clearly.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Power BI Dashboard Design &amp;amp; Development&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Design and develop interactive, user-friendly, and performance-optimized dashboards using Power BI.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Build strong semantic models with reusable measures and standardized KPIs.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Create dashboards that are intuitive for non-technical business users.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Apply best practices in layout, storytelling, drill-downs, filters, and navigation.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Data Modeling &amp;amp; DAX&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Design efficient data models (star/snowflake schemas where appropriate).&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Write optimized and maintainable DAX measures for complex calculations.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Ensure accuracy, consistency, and performance of calculations across reports.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Handle incremental refresh, large datasets, and performance tuning.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Data Integration &amp;amp; Transformation&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; 
&lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Connect Power BI to multiple data sources (databases, Excel, CSVs, APIs, etc.).&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Use Power Query (M) for data cleaning, transformation, and shaping.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Ensure data quality, reconciliation, and consistency across sources.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Deployment, Governance &amp;amp; Maintenance&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Manage Power BI Service workspaces, datasets, refresh schedules, and access controls.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Implement row-level security (RLS) where required.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Ensure dashboards are scalable, maintainable, and aligned with governance standards.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Provide ongoing support, enhancements, and performance improvements.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Collaboration &amp;amp; Communication&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Work closely with business teams, data teams, and IT as required.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Explain dashboards, metrics, and logic in simple, business-friendly language.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Train users on dashboard usage and interpretation where 
needed.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Required Skills &amp;amp; Experience&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Must-Have &lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Proven hands-on expertise in Power BI (Power BI Desktop + Power BI Service)  &lt;b&gt;list all projects undertaken and deployed&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Minimum experience of least &lt;b&gt;5+ years as a stand-alone contributor&lt;/b&gt; on Power BI solution; &lt;b&gt;over all experience of 9+ years&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Strong experience in data modeling and DAX.&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Strong command of Power Query / M language.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Experience building dashboards from scratch, not just modifying existing ones.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Ability to handle poorly defined requirements and convert them into structured solutions.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Strong analytical thinking and problem-solving skills.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Excellent communication skills with non-technical stakeholders.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Good to Have&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt; &lt;ul&gt; 
&lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Experience working with large datasets and performance optimization.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Exposure to SQL and relational databases.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Understanding of BI best practices, KPI frameworks, and reporting governance.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Prior experience in consulting, analytics, or business-facing BI roles.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Experience in operational, or management reporting is a plus.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;What Success Looks Like in This Role&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Dashboards are trusted by business users and actively used for decision-making.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Business users have a sense of delight, confidence and ease when using the tool&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Minimal rework due to clear requirement understanding upfront.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Scalable and reusable data models rather than one-off reports.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Business users feel understood even when they cant articulate requirements perfectly.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Clean, documented, and maintainable Power BI 
assets.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;&lt;b&gt;Ideal Candidate Profile&lt;/b&gt;&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/p&gt; &lt;ul&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Thinks like an analyst, designs like an architect, and executes like a developer.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Comfortable saying this can be done better and backing it with logic.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Detail-oriented, structured, and obsessed with clarity.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;li&gt;&lt;span&gt;&lt;span&gt;&lt;span&gt;Enjoys converting chaos into clean dashboards.&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Crisil&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811577/power-bi-developer-at-crisil/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811577/power-bi-developer-at-crisil/</link>
  <title>[Full Time] Power BI Developer at Crisil</title>
  <dc:date>Thu, 12 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811993/principal-data-scientist-ai-application-development-expert-at-sap/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;strong&gt; &lt;span&gt; We help the world run better &lt;/span&gt; &lt;/strong&gt; &lt;span&gt; &lt;br&gt; At SAP, we keep it simple: you bring your best to us, and well bring out the best in you. Were builders touching over 20 industries and 80% of global commerce, and we need your unique talents to help shape whats next. The work is challenging but it matters. Youll find a place where you can be yourself, prioritize your wellbeing, and truly belong. Whats in it for youConstant learning, skill growth, great benefits, and a team that wants you to grow and succeed. &lt;br&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; What youll do: &lt;/span&gt; &lt;/div&gt; &lt;div&gt; In your role as Principal Data Scientist/ AI Application Development Expert, you will be responsible for the design, coding, testing, and quality assurance in a development team. You will assess and solve issues in new or existing code, and work with high attention to detail, reliability, and efficiency. You will collaborate closely with your team members to ensure success. In your day-to-day you will: &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Work with a team of highly technical and motivated developers. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Collaborate and work with teams spread across geographies and time zones. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; You will work on the entire lifecycle of development projects, both customer specific and SAP-defined. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Design, develop, deploy and monitor efficient AI/ML modules that drives complex business applications. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Adhere to SAPs agile engineering practices and processes. Own your code from ideation to development, across the full stack and through QA and support. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; You will work on multiple topics and technologies with adequate enablement. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Product Delivery: Define product plans and execute against a committed delivery timeline; Manage the day-to-day activities of the development team &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Lead the team to execute enablement, governance, execution, delivery, research projects in Private Cloud organization. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Create architecture documents and concept papers for various new technologies and bring innovation with the team from product, process side. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Product Support: Manage customer expectations via product support ensuring high standards in incident processing, quality of interactions, and fixes that are delivered. Development Expertise: Provide functional, architectural guidance to the team. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Technical Hands-On: Maintain a hands-on approach to deliver highly available and scalable services with a strong focus on aspects such as resiliency, observability, scalability, monitoring, and FinOps. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Lead with example: Contribute towards teams success with your SAP application development experience and technical business domain knowledge. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; What you bring: &lt;/span&gt; &lt;/div&gt; &lt;div&gt; You are a hands-on person with an inspiring can-do mentality focusing on outcomes and a strong delivery attitude. You are an empathetic and solution-oriented person, keeping the diverse developer communities of SAP s product engineering organisations and our customers and partners in focus. 
&lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; University Degree in Computer Science, Software Engineering, or related field and 10+ years of experience in software product development with a focus on developing cloud services and solutions. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Overall 10+ years that include 5+ years of relevant work experience: software development, architecture, data science with cross-domain, cross-functional and cross-product expertise with demonstrated knowledge and skills that are both broad and deep. &lt;/span&gt; &lt;/li&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Strong analytical problem solving and decision-making skills. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Proven hands-on experience in AI development, with a focus on Generative AI technologies. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Good understanding of SAP-relevant development processes and lifecycle, and how to establish a fast feedback and delivery cycle between customers and SAP &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Previous experience with business applications like S/4HANA Cloud, public &amp;amp; private edition is highly desirable. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Strong practical understanding of SAP Business Technology Platform (BTP) or any other cloud platform for development and integration. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience in prompt engineering and designing effective AI model. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Familiarity with Business Data Cloud (BDC), Agentic AI and its applications. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Familiarity with Agile methodologies and version control systems (e.g., Git). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Proficiency in one or more programming languages such as ABAP/Python &lt;/span&gt; &lt;span&gt; /TypeScript/Nodejs and ML/AI frameworks such as Langraph, TensorFlow, PyTorch. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Foster a robust, unified team atmosphere by empowering the development team to engage in efficient, smooth cross-collaboration dynamics for optimized productivity. Demonstrate strong organizational skills and comfort in leading change. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Beneficial: Proven track record in planning and executing complex product/service delivery; Experience in defining success KPIs for delivery, ensuring smooth execution and maintaining service quality throughout the process. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Demonstrated ability to share and communicate ideas to executive staff, technical resources and other key constituents in clear, concise language. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Fluent English language skills, spoken and written. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; Where you Belong: &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; SAP PCP Cross Technology and Architecture unit that is &lt;/span&gt; &lt;span&gt; driving technology empowering innovations in &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;span&gt; business and people. It is part of the &lt;/span&gt; &lt;span&gt; global organization Private Cloud Products (PCP) dedicated to delivering innovative solutions to meet our customers unique business process needs in Private Cloud. &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; You will be joining a dynamic and passionate team dedicated to leveraging AI to facilitate seamless interactions between individuals and information. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; We foster a culture of collaboration, learning, and growth, encouraging all team members to contribute ideas and perspectives. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Enjoy the benefits of working within a diverse and international environment, with opportunities to collaborate with colleagues from around the globe. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;strong&gt; &lt;span&gt; We win with inclusion &lt;/span&gt; &lt;/strong&gt; &lt;span&gt; &lt;br&gt; . SAP is committed to the values of Equal Employment Opportunity and provides accessibility accommodations to applicants with physical and/or mental disabilities. If you are interested in applying for employment with SAP and are in need of accommodation or special assistance to navigate our website or to complete your application, please send an e-mail with your request to Recruiting Operations Team: . For SAP employees: Only permanent roles are eligible for the &lt;/span&gt; &lt;span&gt; SAP Employee Referral Program &lt;span&gt; , according to the eligibility rules set in the SAP Referral Policy. Specific conditions may apply for roles in Vocational Training. &lt;/span&gt; &lt;/span&gt; &lt;span&gt; Successful candidates might be required to undergo a background verification with an external vendor. &lt;/span&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;SAP&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811993/principal-data-scientist-ai-application-development-expert-at-sap/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811993/principal-data-scientist-ai-application-development-expert-at-sap/</link>
  <title>[Full Time] Principal Data Scientist AI Application Development Expert at SAP</title>
  <dc:date>Thu, 12 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811525/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration.&lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require you to engage in discussions about data governance and best practices, ensuring that the data architecture is robust, scalable, and efficient. 
You will also be responsible for documenting the data architecture and providing guidance to team members on implementation strategies.&lt;b&gt;Key Responsibilities:&lt;/b&gt;&lt;li&gt;Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal&lt;/li&gt;&lt;li&gt;Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance.&lt;/li&gt;&lt;li&gt;Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows.&lt;/li&gt;&lt;li&gt;Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks.&lt;/li&gt;&lt;li&gt;Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database.&lt;/li&gt;&lt;li&gt;Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices.&lt;/li&gt;&lt;li&gt;Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms.&lt;/li&gt;&lt;li&gt;Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users.&lt;/li&gt;&lt;li&gt;Worked in implementation of AI, Copilot use cases for Data Governance&lt;/li&gt;&lt;li&gt;Thought leadership and innovation&lt;/li&gt;&lt;li&gt;Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;b&gt;Technical Experience:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms.&lt;/li&gt;&lt;li&gt;Proficiency in 
Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts.&lt;/li&gt;&lt;li&gt;Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview.&lt;/li&gt;&lt;li&gt;Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data.&lt;/li&gt;&lt;li&gt;Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric.&lt;/li&gt;&lt;li&gt;Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake &lt;/li&gt;&lt;li&gt;Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool&lt;/li&gt;&lt;li&gt;Experience in Profisee/Unity Catalog is an added advantage&lt;/li&gt;&lt;li&gt;Candidate must have 5-8 years of IT experience and around 2+ years Data &lt;/li&gt;&lt;li&gt;Governance experience &lt;/li&gt;&lt;li&gt;Architect for a medium sized client delivery project&lt;b&gt;Professional Experience:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Should be able to drive the technology design meetings, propose technology design and architecture &lt;/li&gt;&lt;li&gt;Should have excellent client communication skills&lt;/li&gt;&lt;li&gt;Should have good analytical and problem-solving skills &lt;b&gt;Educational Qualification:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Must have:BE/BTech/MCA&lt;/li&gt;&lt;li&gt;Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / 
Consultant&lt;br /&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br /&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;br /&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;br /&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811525/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811525/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Thu, 12 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811518/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Purview&lt;b&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;&lt;b&gt;Summary&lt;/b&gt;:&lt;/b&gt; As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise during the development process. Your role will require a blend of analytical thinking and creative problem-solving to create efficient and scalable data solutions that meet the needs of the organization. Key Responsibilities: Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposal Candidate Should have experience in Design, develop, and deploy solutions using Microsoft Purview for data governance, cataloguing, security, Quality and compliance. Integrate Microsoft Purview with various data sources and platforms to establish seamless data lineage, metadata management, and governance workflows. Configure and customize data classification, labelling, and sensitivity policies to ensure compliance with business standards and regulatory frameworks. Candidate should have understanding of Azure Date services like Azure Data factory, Azure Databricks, Fabric and Azure SQL database. 
Collaborate with data architects, analysts, and IT teams to map data sources and enforce governance best practices. Monitor and troubleshoot Purview services, ensuring optimal performance and integration with Azure and other data platforms. Document technical specifications, best practices, and governance workflows, providing training and guidance to data stewards and business users. Worked in implementation of AI, Copilot use cases for Data Governance Thought leadership and innovation Experience in handling OLTP and OLAP data workloads, Star Snowflake schema, Entity Relationship diagram and other data architecture related concepts&lt;br&gt;Technical Experience: Candidate should have strong experience in developing and deploying solutions with Microsoft Purview or similar data governance platforms. Proficiency in Microsoft Azure services, including Azure Data Factory, Azure Synapse Analytics, Azure SQL Database, Azure Blob Storage Delta Lake concepts. Strong understanding of data governance principles, including metadata management, data cataloging, lineage tracking, and compliance frameworks of Azure Purview. Experience with data integration, ETL pipelines, and data modelling to structure and organize enterprise-wide data. Expert level in Designing and Architect solutions in Azure Databricks, Azure Data factory, Azure Fabric. 
Candidate must have knowledge of Database like Azure SQL, Oracle and Azure Data Services like ADLS, Delta/Data Lake Candidate should have good understanding of Python, Pyspark and AI features of Data governance tool Experience in Profisee/Unity Catalog is an added advantage For Level 7,8 - Candidate must have 8-15 years of IT experience and around 5+ years of extensive Data Governance experience oFor Level 9,10 - Candidate must have 5-8 years of IT experience and around 2+ years Data oGovernance experience Architect for a medium sized client delivery projectProfessional Experience: Should be able to drive the technology design meetings, propose technology design and architecture Should have excellent client communication skills Should have good analytical and problem-solving skills &lt;br&gt;Educational Qualification: Must have:BE/BTech/MCA Good to have:ME/MTech&lt;b&gt;Qualification&lt;/b&gt; 15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811518/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811518/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Thu, 12 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811825/analyst-at-eclerx/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt;Apprentice_Analyst Roles and responsibilities:&lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Data enrichment/gap fill, adding attributes, standardization, normalization, and categorization of online and offline product on the CMS platform (Content Management).&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Should have good understanding of HTML, CSS, Javascript.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Data quality check and correction&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Data profiling and reporting (basic)&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Email communication with the client on request acknowledgment, project status and response on queries&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Help customers in enhancing their product data quality from the technical specification and description perspective&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Provide technical consulting to the customer category managers around the industry best practices of product data enhancement&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;strong&gt; Technical and Functional Skills:&lt;/strong&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Bachelor s Degree (Any Graduate)&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Good Understanding of tools and technology.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Intermediate knowledge of MS Office/Internet.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Analytics / KPO / Research&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full 
time&lt;br /&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;eClerx&lt;br /&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;br /&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811825/analyst-at-eclerx/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811825/analyst-at-eclerx/</link>
  <title>[Full Time] Analyst at eClerx</title>
  <dc:date>Thu, 12 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810548/qa-with-azure-and-databricks-professional-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Key Responsibilities&lt;/span&gt;&lt;/b&gt;&lt;br&gt;&lt;span&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;1) Test Strategy Planning&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Define and maintain &lt;b&gt;test strategies&lt;/b&gt; for data pipelines, notebooks, jobs, and APIs across Databricks Azure.&lt;/li&gt;&lt;li&gt;Translate &lt;b&gt;business rules&lt;/b&gt; and &lt;b&gt;data quality SLAs&lt;/b&gt; into testable acceptance criteria.&lt;/li&gt;&lt;li&gt;Establish &lt;b&gt;risk-based&lt;/b&gt; test plans across functional, integration, system, and &lt;b&gt;non-functional&lt;/b&gt; (performance, scalability, security) testing.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;2) Data Pipeline Testing (Databricks / Spark / Delta)&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Develop &lt;b&gt;automated tests&lt;/b&gt; for &lt;b&gt;Spark&lt;/b&gt; transformations (e.g., &lt;b&gt;PySpark&lt;/b&gt;, &lt;b&gt;Spark SQL&lt;/b&gt;) and &lt;b&gt;Delta Lake&lt;/b&gt; features (MERGE, time travel, schema evolution).&lt;/li&gt;&lt;li&gt;Implement &lt;b&gt;data quality checks&lt;/b&gt; (completeness, accuracy, consistency, referential integrity, drift detection).&lt;/li&gt;&lt;li&gt;Validate &lt;b&gt;batch and streaming&lt;/b&gt; pipelines, job dependencies, and SLAs.&lt;/li&gt;&lt;li&gt;Create &lt;b&gt;reproducible test datasets&lt;/b&gt; and use &lt;b&gt;Delta Live Tables&lt;/b&gt; (if applicable) to validate expectations.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;3) Azure Cloud QA&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Test data flows across &lt;b&gt;Azure Data Factory / Synapse pipelines&lt;/b&gt;, Databricks Jobs, &lt;b&gt;Azure Storage/ADLS&lt;/b&gt;, &lt;b&gt;Azure SQL/SQL MI&lt;/b&gt;, &lt;b&gt;Event Hub/Kafka&lt;/b&gt;, and &lt;b&gt;Key Vault&lt;/b&gt; 
access patterns.&lt;/li&gt;&lt;li&gt;Verify &lt;b&gt;IAM/ACLs/Unity Catalog&lt;/b&gt; permissions, secrets handling, and &lt;b&gt;network boundary&lt;/b&gt; controls.&lt;/li&gt;&lt;li&gt;Validate &lt;b&gt;infrastructure configurations&lt;/b&gt; across environments (Dev/Test/Prod) and support &lt;b&gt;IaC&lt;/b&gt; validation (e.g., Bicep/Terraform plan diffs).&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;4) Automation DevOps&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Integrate tests into &lt;b&gt;CI/CD&lt;/b&gt; (Azure DevOps/GitHub Actions): unit, integration, data-validation, and &lt;b&gt;post-deployment&lt;/b&gt; smoke tests.&lt;/li&gt;&lt;li&gt;Implement &lt;b&gt;quality gates&lt;/b&gt; (coverage thresholds, schema checks, contract tests, static checks like Pylint/flake8).&lt;/li&gt;&lt;li&gt;Build &lt;b&gt;test utilities&lt;/b&gt; and shared libraries for data assertions, fixtures, and synthetic data generation.&lt;/li&gt;&lt;li&gt;Use &lt;b&gt;feature branches&lt;/b&gt;, PR checks, and &lt;b&gt;merge policies&lt;/b&gt; to enforce quality.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;5) Observability Reliability&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Instrument tests and pipelines with &lt;b&gt;logging metrics&lt;/b&gt; (e.g., Azure Monitor, Log Analytics, Databricks metrics).&lt;/li&gt;&lt;li&gt;Define &lt;b&gt;SLIs/SLOs&lt;/b&gt; for data quality and &lt;b&gt;pipeline reliability&lt;/b&gt;; participate in incident reviews and RCA.&lt;/li&gt;&lt;li&gt;Establish &lt;b&gt;quality dashboards&lt;/b&gt; (test pass rate, DQ incidents, recovery time, flaky test rate).&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;6) Collaboration Governance&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Partner with &lt;b&gt;data engineers, platform engineers, product owners, and data stewards&lt;/b&gt;.&lt;/li&gt;&lt;li&gt;Contribute to &lt;b&gt;test data management&lt;/b&gt; practices and a 
&lt;b&gt;data contract&lt;/b&gt; approach with upstream/downstream teams.&lt;/li&gt;&lt;li&gt;Document test plans, runbooks, and &lt;b&gt;traceability&lt;/b&gt; from requirements to tests.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;&lt;span&gt;Required Qualifications&lt;/span&gt;&lt;/b&gt;&lt;br&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;&lt;b&gt;3-8+ years&lt;/b&gt; in QA/Test Engineering with &lt;b&gt;2+ years&lt;/b&gt; in &lt;b&gt;data platform&lt;/b&gt; or &lt;b&gt;Spark-based&lt;/b&gt; environments.&lt;/li&gt;&lt;li&gt;Hands-on with &lt;b&gt;Databricks&lt;/b&gt;: Notebooks, Jobs, Delta Lake, cluster policies.&lt;/li&gt;&lt;li&gt;Strong &lt;b&gt;Python&lt;/b&gt; for tests/utilities; solid &lt;b&gt;SQL&lt;/b&gt; for validation and profiling.&lt;/li&gt;&lt;li&gt;Experience testing &lt;b&gt;ETL/ELT&lt;/b&gt; patterns, data partitioning, late-arriving data, slowly changing dimensions, idempotency.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Azure&lt;/b&gt; experience: &lt;b&gt;Data Factory&lt;/b&gt;, &lt;b&gt;ADLS Gen2&lt;/b&gt;, &lt;b&gt;Key Vault&lt;/b&gt;, &lt;b&gt;Azure DevOps/GitHub&lt;/b&gt;.&lt;/li&gt;&lt;li&gt;CI/CD for data: pipelines, artifacts, environments, approvals, &lt;b&gt;test stages&lt;/b&gt; and &lt;b&gt;post-deploy&lt;/b&gt; validations.&lt;/li&gt;&lt;li&gt;Familiar with &lt;b&gt;schema validation &lt;/b&gt;and &lt;b&gt;API testing&lt;/b&gt; (REST/Databricks Jobs API).&lt;/li&gt;&lt;li&gt;Clear understanding of &lt;b&gt;security&lt;/b&gt; (RBAC/ABAC, managed identities), &lt;b&gt;cost performance&lt;/b&gt; considerations.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;span&gt;&lt;strong&gt;Nice-to-Have&lt;/strong&gt;&lt;/span&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;&lt;b&gt;Unity Catalog&lt;/b&gt;, data lineage, and governance testing.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Streaming&lt;/b&gt; (Structured Streaming, Delta Live Tables), &lt;b&gt;event-driven&lt;/b&gt; validation.&lt;/li&gt;&lt;li&gt;&lt;b&gt;Performance testing&lt;/b&gt; of Spark jobs (cluster sizing, shuffle hotspots, 
skew).&lt;/li&gt;&lt;li&gt;Exposure to &lt;b&gt;privacy/regulatory&lt;/b&gt; controls (PII masking, GDPR/CCPA, SOX).&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;br /&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br /&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br /&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;br /&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br /&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;br /&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;br /&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810548/qa-with-azure-and-databricks-professional-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810548/qa-with-azure-and-databricks-professional-at-infobeans/</link>
  <title>[Full Time] QA with Azure and Databricks Professional at Infobeans</title>
  <dc:date>Wed, 11 Feb 2026 14:46:30 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810396/senior-software-engineer-power-bi-developer-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Education Qualification: &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Bachelors degree in computer science or related field or higher with minimum 4 years of relevant experience.We are looking for a seasoned Power BI developer with proven expertise in designing, developing, and deploying enterprise-level Business Intelligence (BI) solutions. The ideal candidate will bring 3+ years of experience in BI and data analytics, with a strong track record of delivering scalable, high-quality solutions. This leadership role combines hands-on technical development, team management, and stakeholder collaboration, ensuring that BI initiatives effectively support critical business decisions.&lt;br&gt;&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;b&gt;Your future duties and responsibilities:&lt;/b&gt;&lt;br&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;end-to-end development of interactive Power BI dashboards and data models.&lt;/li&gt;&lt;li&gt;Partner with business stakeholders to translate requirements into impactful BI solutions.&lt;/li&gt;&lt;li&gt;Design and optimize complex SQL queries, stored procedures, and ETL processes for seamless dataintegration.&lt;/li&gt;&lt;li&gt;Build and maintain SSRS and Report Builder reports for enterprise-wide reporting needs.&lt;/li&gt;&lt;li&gt;Ensure data accuracy, security, performance, and scalability across all BI deliverables.&lt;/li&gt;&lt;li&gt;Provide technical leadership and mentorship to BI developers; conduct code reviews and enforcebest practices.&lt;/li&gt;&lt;li&gt;Stay current with emerging BI technologies and recommend improvements to reportingarchitecture.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Required qualifications to be successful in this role:&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Must have Skills-3+ years of experience in Business Intelligence, Data Analytics, and Reporting.&lt;/li&gt;&lt;li&gt;Strong understanding of data models, attributes, and regulatory 
requirements.&lt;/li&gt;&lt;li&gt;Expert-level proficiency in Power BI (DAX, Power Query, data modeling, RLS, performance tuning).&lt;/li&gt;&lt;li&gt;Advanced hands-on experience in SQL (complex queries, stored procedures, performanceoptimization).&lt;/li&gt;&lt;li&gt;Proven expertise in SSRS and Report Builder.Knowledge of data warehousing concepts and ETL workflows.&lt;/li&gt;&lt;li&gt;Excellent analytical, troubleshooting, and stakeholder management skills.&lt;/li&gt;&lt;li&gt;Prior experience in leading or mentoring BI teams.&lt;/li&gt;&lt;li&gt;Exposure to DevOps, CI/CD, automation, and data governance.&lt;/li&gt;&lt;li&gt;Experience managing global, multi-stakeholder BI programs.&lt;/li&gt;&lt;li&gt;Ability to implement automation, monitoring, and explore ML/AI integration for reporting enhancements.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Azure DevOps&lt;/li&gt;&lt;li&gt;English&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810396/senior-software-engineer-power-bi-developer-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810396/senior-software-engineer-power-bi-developer-at-cgi/</link>
  <title>[Full Time] Senior Software Engineer- Power BI Developer at CGI</title>
  <dc:date>2026-02-11T13:22:32+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812804/azure-data-engineer-at-hdfc-bank/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Job Role&lt;/strong&gt; - Sr / Lead Azure Data Engineer&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location&lt;/strong&gt; - Navi Mumbai / Bangalore / Noida &amp;amp; Gurugram&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience&lt;/strong&gt; - 05 Years to 14 Years&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;strong&gt;Job Responsibilities: &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ol type=&quot;1&quot;&gt;&lt;li&gt;Fundamentals of DevOps, DevSecOps, CD / CI Pipeline using ADO&lt;/li&gt;&lt;li&gt;Good understanding of MPP Architecture, MySQL, RDS, MS&lt;/li&gt;&lt;li&gt;SQL DB, Oracle ,Postgres DB&lt;/li&gt;&lt;li&gt;Would need to interact with Software Integrators on a day-today basis.&lt;/li&gt;&lt;li&gt;Deployment and testing skills&lt;/li&gt;&lt;li&gt;Strong communication skills&lt;/li&gt;&lt;li&gt;ELT - Trino, Azure&amp;nbsp;Data&amp;nbsp;factory, Azure&amp;nbsp;Databricks, PySpark, Python, Iceberg, Parquet&lt;/li&gt;&lt;li&gt;CDC Tool like Qlik/ Golden Gate/Dbsium/IBM CDC, Kafka/ Solace  Scripting Shell, Python, Java,&lt;/li&gt;&lt;li&gt;Good Understanding of Azure Cloud&amp;nbsp;Engineering ADLS, Iceberg,&amp;nbsp;Databricks, AKS, RHEL&lt;/li&gt;&lt;li&gt;Good understanding of MS Project&lt;/li&gt;&lt;li&gt;Development skill using Trino, PySpark and&amp;nbsp;Databricks&lt;/li&gt;&lt;li&gt;Understanding of security basics, Encryption/Decryption,&lt;/li&gt;&lt;li&gt;Understanding of IT hardware basics: Unix/Windows servers, RAM/CPU utilization, storage on cloud&lt;/li&gt;&lt;li&gt;Basic project management skills for preparation of a high-level project plan.&lt;/li&gt;&lt;li&gt;Understanding of DNS and Load Balancing, and their use.&lt;/li&gt;&lt;li&gt;&amp;nbsp;Understanding of DR/BCP/Recovery/Backup conceptually for DB and Apply Servers&lt;/li&gt;&lt;/ol&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; 
QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Hdfc Bank&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812804/azure-data-engineer-at-hdfc-bank/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812804/azure-data-engineer-at-hdfc-bank/</link>
  <title>[Full Time] Azure Data Engineer at Hdfc Bank</title>
  <dc:date>2026-02-11T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810498/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Data Science&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also addressing any challenges that arise during the development process. &lt;br&gt;Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Engage in continuous learning to stay updated with industry trends and best practices.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to ensure data architecture meets business needs. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Data Science.&lt;/li&gt;&lt;li&gt;Strong analytical skills to interpret complex data sets.&lt;/li&gt;&lt;li&gt;Experience with data modeling and database design.&lt;/li&gt;&lt;li&gt;Familiarity with data integration techniques and tools.&lt;/li&gt;&lt;li&gt;Knowledge of data governance and data quality principles. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 3 years of experience in Data Science.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810498/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810498/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T11:47:28+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810405/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810405/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810405/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T11:04:30+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810483/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Data Architecture Principles&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day involves modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture supports the applications functionality and performance needs, while also considering scalability and security aspects. &lt;br&gt;Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate workshops and discussions to gather requirements and feedback from stakeholders.&lt;/li&gt;&lt;li&gt;Mentor junior professionals in best practices and emerging trends in data architecture. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Data Architecture Principles.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and methodologies.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Knowledge of database management systems and data storage solutions.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data architecture and services. &lt;br&gt;Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Data Architecture Principles.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810483/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810483/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T08:50:15+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810338/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Reltio, Informatica MDM&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with business needs and technical specifications. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also addressing any challenges that arise during the development process. Your role will be pivotal in establishing a robust data framework that supports the organization&apos;s objectives and enhances data accessibility and usability. 
Roles &amp;amp; Responsibilities:- Expected to perform independently and become an SME.- Required active participation/contribution in team discussions.- Contribute in providing solutions to work related problems.- Engage in continuous learning to stay updated with industry trends and best practices.- Collaborate with cross-functional teams to ensure data architecture meets business requirements.&quot;Work with team to design, build and configure applications to meet business process and application requirements which involve the following tasks:Contribute in all phases of the development lifecycleWrite well designed, testable, efficient codeEnsure designs are in compliance with specificationsCollaborate with cross-functional teams including business analysts, data architects, and QA&quot;Informatica MDM, Core Java, Rest API, J2EE Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Reltio, Informatica MDM.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and methodologies.- Familiarity with cloud-based data solutions and architectures.- Ability to analyze and optimize data storage and retrieval processes.&quot;Must Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;1. Implement, configure and manage Reltio MDM solutions.2. Develop and maintain Reltio data models, including entities, attributes, and relationships.3. Ensure data quality and integrity through regular monitoring and maintenance4. Configure and fine-tune match rules and survivorship strategies to ensure accurate master records.5. Develop and manage REST API integrations with upstream and downstream systems.6. Handle batch and real-time data ingestion, transformation logic, and mapping using Reltio Integration Hub or external ETL tools.7. Support:Investigate any issues reported in Production and work on fix. 8. 
Ability to identify, prioritize, and execute tasks to meet critical project deadlines.9. Ability to work well and collaborate with peers in a team-oriented environment with minimal supervision.10. Experience in analysis, design development, testing and implementation of enterprise scale apps.11. Minimum 5 years of Experience in ReltioGood to Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;1. Knowledge/Experience in GCP/Azure cloud2. Knowledge/Experience of Agile methodology and Sprint based project delivery3. Knowledge/Experience in Azure DevOps.&quot;&quot; Should be good team player. Good Written &amp;amp; Verbal Communication skills Good analytical and troubleshooting skills.&quot; Additional Information:- The candidate should have minimum 3 years of experience in Reltio.- This position is based at our Pune office.- A 15 years full time education is required.Resource should be open to work in B Shift (Afternoon Shifts 12:30 to 10:30) during week days.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810338/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810338/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T08:40:13+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810387/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also addressing any challenges that arise during the development process. Your role will be pivotal in ensuring that the data architecture is robust, scalable, and efficient, ultimately supporting the organization&apos;s data strategy and goals. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing and mentoring within the team to enhance overall performance.- Continuously assess and improve data architecture practices to ensure alignment with industry standards. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Microsoft Azure Data Services.- Experience with data modeling and database design.- Strong understanding of data integration techniques and tools.- Familiarity with cloud-based data storage solutions and architectures.- Ability to implement data governance and security measures. Additional Information:- The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810387/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810387/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T04:11:05+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810326/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Databricks&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly and efficiently throughout the organization, contributing to the overall success of data-driven initiatives. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing sessions to enhance team capabilities.- Develop and maintain documentation related to data architecture and design. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Microsoft Azure Databricks.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud data storage solutions and architectures.- Ability to design scalable and efficient data pipelines. Additional Information:- The candidate should have minimum 5 years of experience in Microsoft Azure Databricks.- This position is based in Pune.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810326/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810326/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-02-11T03:11:45+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810380/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications while collaborating with various stakeholders to facilitate effective data management and utilization. Roles &amp;amp; Responsibilities:- Expected to be an SME, collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing and mentoring within the team to enhance overall performance.- Evaluate and implement best practices in data architecture to optimize data flow and storage. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;- Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Proficiency in Microsoft Azure Data Services.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud-based data storage solutions and architectures.- Ability to design scalable and efficient data pipelines. Additional Information:- The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810380/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810380/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Wed, 11 Feb 2026 03:10:22 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810929/support-engineer-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Core Job responsibilities: 1. Monitor &amp;amp; Optimise Redshift clusters: &lt;br&gt; Monitor Amazon Redshift clusters, identify long-running queries, and optimize them to maintain cluster performance and ensure healthy operational state 2. Monitor Data Pipelines &amp;amp; ETL Jobs &lt;br&gt; a. Continuously monitor Glue, Airflow, Lambda, Redshift, Spark, EMR and Kinesis jobs. &lt;br&gt; b. Identify failures, performance degradation, or bottlenecks in real time. 3. Troubleshoot Data Pipeline Failures &lt;br&gt; a. Diagnose issues in extraction, transformation, loading, schema mismatches, and data quality. &lt;br&gt; b. Perform impact analysis and apply immediate fixes. 4. Provide continuous support of existing data engineering products / tools / platforms / solutions that DE built and even extend them for new use cases onboard. 5. Handle On-Call / Incident Response &lt;br&gt; a. Own the end-to-end on-call rotation, respond to PagerDuty alerts, and restore systems within SLA. &lt;br&gt; b. Work directly with data engineering teams to resolve critical incidents. 6. Conduct Root Cause Analysis (RCA) &lt;br&gt; a. Perform RCA for every major incident. &lt;br&gt; b. Document findings and propose long-term preventive solutions. 7. Manage Data Quality &amp;amp; Validation &lt;br&gt; a. Validate accuracy, completeness, freshness, lineage, and schema consistency 8.Optimize Queries &amp;amp; Performance &lt;br&gt; a. Optimize inefficient SQL (Athena/Redshift/Presto/Spark). &lt;br&gt; b. Tune warehouse performance, resolve WLM queue issues, and reduce compute cost. 9.Maintain Metadata, Catalogs &amp;amp; Schemas &lt;br&gt; a. Manage Glue Catalog, partition refresh, schema evolution, table permissions, and lineage. &lt;br&gt; b. Ensure smooth integration between S3, Glue, Athena, Redshift, and Lake Formation. 10.Support Deployments &amp;amp; Release Management &lt;br&gt; a. 
Assist in promoting ETL jobs, model code, and pipeline configurations through CI/CD. &lt;br&gt; b. Validate deployments and perform rollback when necessary. 11. Collaborate with BI, Product &amp;amp; Stakeholders &lt;br&gt; a. Work with BI teams, analysts, PMs, and upstream/downstream owners. &lt;br&gt; b. Provide data accessibility support &amp;amp; answer data troubleshooting queries. 12. Maintain Documentation &amp;amp; SOPs &lt;br&gt; a. Maintain playbooks, runbooks, troubleshooting guides, and data dictionaries. &lt;br&gt; b. Ensure knowledge transfer and training for new team members. 2+ years of scripting language experience &lt;br&gt; Strong SQL and debugging skills &lt;br&gt; AWS (S3, Glue, EMR, Lambda, Redshift, Athena) &lt;br&gt; Strong Python and Pyspark skills &lt;br&gt; Understanding of data modelling, ETL, and batch/streaming pipelines &lt;br&gt; Experience with version control and CI/CD (Git, CodePipeline) &lt;br&gt; Good communication for stakeholder-facing troubleshooting &lt;br&gt; Good to have GenAI Skillset, but not mandatory Experience with AWS, networks and operating systems&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810929/support-engineer-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810929/support-engineer-at-amazon/</link>
  <title>[Full Time] Support Engineer at Amazon</title>
  <dc:date>Tue, 10 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812888/senior-software-engineer-etl-developer-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;We are looking for an experienced ETL Developer to join our team. The ideal candidate should be passionate about coding and developing scalable and high-performance applications. You will work closely with our front-end developers, designers, and other members of the team to deliver quality solutions that meet the needs of our clients.&lt;br&gt;Qualification: Bachelor&apos;s degree in Computer Science or related field or higher with minimum 5 years of relevant experience.&lt;/p&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;Your future duties and responsibilities:&lt;/b&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;br&gt;&lt;/strong&gt;&lt;/p&gt;&lt;li&gt;ETL Development &amp;amp; Implementation Strong experience in designing, developing, and deploying ETL solutions using Informatica Cloud Services (ICS), Informatica PowerCenter, and other data integration tools.&lt;/li&gt;&lt;li&gt;Data Integration &amp;amp; Optimization Proficient in extracting, transforming, and loading (ETL) data from multiple sources, optimizing performance, and ensuring data quality.&lt;/li&gt;&lt;li&gt;Stakeholder Collaboration Skilled at working with cross-functional teams, including data engineers, analysts, and business stakeholders, to align data solutions with business needs.&lt;/li&gt;&lt;li&gt;Scripting &amp;amp; Data Handling Experience with SQL, PL/SQL, and scripting languages (e.g., Python, Shell) for data manipulation, transformation, and automation.&lt;/li&gt;&lt;li&gt;Tool Proficiency Familiarity with Informatica Cloud, version control systems (e.g., Git), JIRA, Confluence, and Microsoft Office Suite.&lt;/li&gt;&lt;li&gt;Agile Methodologies Knowledge of Agile frameworks (Scrum, Kanban) with experience in managing backlogs, writing user stories, and participating in sprint planning.&lt;/li&gt;&lt;li&gt;Testing &amp;amp; Validation Involvement in ETL testing, data validation, unit testing, and integration 
testing to ensure accuracy, consistency, and completeness of data. Problem-Solving Skills Strong analytical mindset to troubleshoot, debug, and optimize ETL workflows, data pipelines, and integration solutions effectively.&lt;/li&gt;&lt;li&gt;Communication &amp;amp; Documentation Excellent written and verbal communication skills to document ETL processes, create technical design documents, and present data integration strategies to stakeholders.. &lt;/li&gt;&lt;li&gt;Bachelor&apos;s degree in Computer Science, Information Technology, or a related field.. Minimum of 6 years of experience as an Informatica Developer, with hands-on involvement in enterprise-scale ETL and data integration projects.. &lt;/li&gt;&lt;li&gt;Proficiency in Informatica Cloud Services (ICS), Informatica PowerCenter, and other ETL tools for data extraction, transformation, and loading.. Strong SQL skills and experience with databases such as Oracle, SQL Server, Snowflake, or PostgreSQL to support data processing and optimization.. &lt;/li&gt;&lt;li&gt;Ability to translate business requirements into ETL workflows, data mappings, and process flows that align with business goals.. Strong analytical and problem-solving skills to troubleshoot data integration issues, optimize performance, and ensure data accuracy.. Excellent communication skills, both written and verbal, to effectively collaborate with stakeholders and convey complex data concepts clearly.&lt;/li&gt;&lt;li&gt;Proven ability to work independently and collaboratively in a fast-paced, deadline-driven environment, adapting to changing priorities as needed.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Must-Have Skills:.&lt;/strong&gt; Proficiency in Informatica Cloud Services (ICS), Informatica PowerCenter, and other ETL tools for data extraction, transformation, and loading.. 
Strong SQL skills and experience with databases such as Oracle, SQL Server, Snowflake, or PostgreSQL to support data processing and optimization.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Good-to-Have Skills:&lt;/strong&gt;Exposure with Banking domain would be recommended . Able to work in a production support environment. . Should be able to work with L1 support team. . Good Communication is must.&lt;/li&gt;&lt;/div&gt;&lt;p&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Informatica&lt;/li&gt;&lt;li&gt;Oracle&lt;/li&gt;&lt;li&gt;RESTful (Rest-APIs)&lt;/li&gt;&lt;li&gt;Jenkins&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;b&gt;&lt;/b&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812888/senior-software-engineer-etl-developer-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812888/senior-software-engineer-etl-developer-at-cgi/</link>
  <title>[Full Time] Senior Software Engineer-ETL Developer at CGI</title>
  <dc:date>Tue, 10 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811298/support-engineer-lmaq-de-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Core Job responsibilities: 1. Monitor &amp;amp; Optimise Redshift clusters: &lt;br&gt; Monitor Amazon Redshift clusters, identify long-running queries, and optimize them to maintain cluster performance and ensure healthy operational state 2. Monitor Data Pipelines &amp;amp; ETL Jobs &lt;br&gt; a. Continuously monitor Glue, Airflow, Lambda, Redshift, Spark, EMR and Kinesis jobs. &lt;br&gt; b. Identify failures, performance degradation, or bottlenecks in real time. 3. Troubleshoot Data Pipeline Failures &lt;br&gt; a. Diagnose issues in extraction, transformation, loading, schema mismatches, and data quality. &lt;br&gt; b. Perform impact analysis and apply immediate fixes. 4. Provide continuous support of existing data engineering products / tools / platforms / solutions that DE built and even extend them for new use cases onboard. 5. Handle On-Call / Incident Response &lt;br&gt; a. Own the end-to-end on-call rotation, respond to PagerDuty alerts, and restore systems within SLA. &lt;br&gt; b. Work directly with data engineering teams to resolve critical incidents. 6. Conduct Root Cause Analysis (RCA) &lt;br&gt; a. Perform RCA for every major incident. &lt;br&gt; b. Document findings and propose long-term preventive solutions. 7. Manage Data Quality &amp;amp; Validation &lt;br&gt; a. Validate accuracy, completeness, freshness, lineage, and schema consistency 8.Optimize Queries &amp;amp; Performance &lt;br&gt; a. Optimize inefficient SQL (Athena/Redshift/Presto/Spark). &lt;br&gt; b. Tune warehouse performance, resolve WLM queue issues, and reduce compute cost. 9.Maintain Metadata, Catalogs &amp;amp; Schemas &lt;br&gt; a. Manage Glue Catalog, partition refresh, schema evolution, table permissions, and lineage. &lt;br&gt; b. Ensure smooth integration between S3, Glue, Athena, Redshift, and Lake Formation. 10.Support Deployments &amp;amp; Release Management &lt;br&gt; a. 
Assist in promoting ETL jobs, model code, and pipeline configurations through CI/CD. &lt;br&gt; b. Validate deployments and perform rollback when necessary. 11. Collaborate with BI, Product &amp;amp; Stakeholders &lt;br&gt; a. Work with BI teams, analysts, PMs, and upstream/downstream owners. &lt;br&gt; b. Provide data accessibility support &amp;amp; answer data troubleshooting queries. 12. Maintain Documentation &amp;amp; SOPs &lt;br&gt; a. Maintain playbooks, runbooks, troubleshooting guides, and data dictionaries. &lt;br&gt; b. Ensure knowledge transfer and training for new team members. 2+ years of scripting language experience &lt;br&gt; Strong SQL and debugging skills &lt;br&gt; AWS (S3, Glue, EMR, Lambda, Redshift, Athena) &lt;br&gt; Strong Python and Pyspark skills &lt;br&gt; Understanding of data modelling, ETL, and batch/streaming pipelines &lt;br&gt; Experience with version control and CI/CD (Git, CodePipeline) &lt;br&gt; Good communication for stakeholder-facing troubleshooting &lt;br&gt; Good to have GenAI Skillset, but not mandatory Experience with AWS, networks and operating systems&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811298/support-engineer-lmaq-de-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811298/support-engineer-lmaq-de-at-amazon/</link>
  <title>[Full Time] Support Engineer, LMAQ-DE at Amazon</title>
  <dc:date>Tue, 10 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810507/bi-developer-power-bi-sql-at-apexon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;strong&gt; Key Responsibilities &lt;/strong&gt; &lt;/div&gt; &lt;div&gt; Design, develop, and deploy advanced Power BI reports, dashboards, and semantic models &lt;/div&gt; &lt;div&gt; Build and optimize data models using star/snowflake schemas for performance and scalability &lt;/div&gt; &lt;div&gt; Write complex DAX measures, calculated columns, and KPIs &lt;/div&gt; &lt;div&gt; Develop and maintain Power Query (M) transformations &lt;/div&gt; &lt;div&gt; Ensure data accuracy, governance, security (RLS), and best practices across Power BI assets &lt;/div&gt; &lt;div&gt; Optimize report performance and troubleshoot issues in large datasets &lt;/div&gt; &lt;div&gt; Collaborate with data engineers on data pipelines and warehouse design &lt;/div&gt; &lt;div&gt; Translate business requirements into technical BI solutions &lt;/div&gt; &lt;div&gt; Lead BI standards, documentation, and best practices &lt;/div&gt; &lt;div&gt; Mentor junior developers and conduct code/review quality checks &lt;/div&gt; &lt;div&gt; Support Power BI Service administration (workspaces, gateways, deployments, licensing) &lt;/div&gt; &lt;div&gt; Work with stakeholders to drive data-driven decision making &lt;/div&gt; &lt;div&gt; &lt;strong&gt; Required Skills Qualifications &lt;/strong&gt; &lt;/div&gt; &lt;div&gt; 5+ years of experience in BI development, with strong Power BI focus &lt;/div&gt; &lt;div&gt; Expert-level Power BI (Desktop Service) experience &lt;/div&gt; &lt;div&gt; Advanced DAX knowledge (time intelligence, performance tuning) &lt;/div&gt; &lt;div&gt; Strong data modeling experience (dimensional modeling) &lt;/div&gt; &lt;div&gt; Solid SQL skills (SQL Server, Azure SQL, or equivalent) &lt;/div&gt; &lt;div&gt; Experience with Power Query (M language) &lt;/div&gt; &lt;div&gt; Understanding of data warehousing concepts and ETL processes &lt;/div&gt; &lt;div&gt; Experience implementing Row-Level Security (RLS) 
&lt;/div&gt; &lt;div&gt; Strong analytical, problem-solving, and communication skills &lt;/div&gt; &lt;div&gt; &lt;strong&gt; Preferred / Nice to Have &lt;/strong&gt; &lt;/div&gt; &lt;div&gt; Experience with Azure data stack (ADF, Synapse, Data Lake, Databricks) &lt;/div&gt; &lt;div&gt; Experience with CI/CD for Power BI (Azure DevOps, Git) &lt;/div&gt; &lt;div&gt; Exposure to Tabular Editor, DAX Studio, ALM Toolkit &lt;/div&gt; &lt;div&gt; Experience with other BI tools (SSRS, Tableau, Looker) &lt;/div&gt; &lt;div&gt; Microsoft Power BI certification (PL-300 or equivalent) &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Apexon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810507/bi-developer-power-bi-sql-at-apexon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810507/bi-developer-power-bi-sql-at-apexon/</link>
  <title>[Full Time] BI Developer (Power BI, SQL) at Apexon</title>
  <dc:date>Mon, 09 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811419/servicenow-csm-professional-at-dxc-technology/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Key Responsibilities &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Design, configure, and customize &lt;b&gt; ServiceNow CSM modules &lt;/b&gt; including: &lt;ul&gt; &lt;li&gt; Case Management, Agent Workspace, Account Contact Management, Entitlements, and Service Contracts. &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; Develop &lt;b&gt; custom CSM applications, components, and scoped apps &lt;/b&gt; to meet business requirements. &lt;/li&gt; &lt;li&gt; Create and maintain &lt;b&gt; integrations with CRM, ERP, and other third-party systems &lt;/b&gt; using REST/SOAP APIs, IntegrationHub, Web Services, and MID Server. &lt;/li&gt; &lt;li&gt; Build and automate &lt;b&gt; customer workflows &lt;/b&gt; using Flow Designer, Business Rules, and Scripting. &lt;/li&gt; &lt;li&gt; Develop and maintain &lt;b&gt; UI pages, client scripts, UI actions, UI policies, and Script Includes &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Enhance and customize &lt;b&gt; Service Portal / Customer Portal &lt;/b&gt; experiences for end users. &lt;/li&gt; &lt;li&gt; Collaborate with business analysts, architects, and stakeholders to translate business needs into technical solutions. &lt;/li&gt; &lt;li&gt; Implement &lt;b&gt; performance optimization, access control (ACLs) &lt;/b&gt; , and data security best practices. &lt;/li&gt; &lt;li&gt; Participate in &lt;b&gt; platform upgrades, patching, and troubleshooting &lt;/b&gt; to ensure stability and reliability. &lt;/li&gt; &lt;li&gt; Document technical solutions, conduct peer code reviews, and support continuous improvement. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Required Skills Qualifications &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;b&gt; 6 10 years &lt;/b&gt; of ServiceNow development experience with strong focus on &lt;b&gt; Customer Service Management (CSM) &lt;/b&gt; . 
&lt;/li&gt; &lt;li&gt; Proficiency in &lt;b&gt; JavaScript, Glide scripting, and ServiceNow server/client scripting &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Strong knowledge of &lt;b&gt; CSM data models, case lifecycle, and customer workflows &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Experience integrating ServiceNow with &lt;b&gt; CRM tools (e.g., Salesforce, Dynamics), ERP, or external ticketing systems &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Experience developing &lt;b&gt; custom applications &lt;/b&gt; using ServiceNow Studio and Scoped Apps. &lt;/li&gt; &lt;li&gt; Hands-on experience with &lt;b&gt; Flow Designer, IntegrationHub, and Service Portal &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Familiarity with &lt;b&gt; account and contact management, entitlements, SLAs, and knowledge base &lt;/b&gt; within CSM. &lt;/li&gt; &lt;li&gt; Good understanding of &lt;b&gt; ITIL, customer service best practices, and digital workflows &lt;/b&gt; . &lt;/li&gt; &lt;li&gt; Strong analytical, problem-solving, and communication skills. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; Must have : &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;b&gt; ServiceNow Certified System Administrator (CSA) &lt;/b&gt; &lt;/li&gt; &lt;li&gt; &lt;b&gt; ServiceNow Certified Application Developer (CAD) &lt;/b&gt; &lt;/li&gt; &lt;li&gt; &lt;b&gt; ServiceNow Certified Implementation Specialist CSM &lt;/b&gt; &lt;/li&gt; &lt;li&gt; Experience with &lt;b&gt; Virtual Agent, Predictive Intelligence, and Case Deflection &lt;/b&gt; . &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;b&gt; Preferred / Nice-to-Have &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Exposure to &lt;b&gt; DevOps tools &lt;/b&gt; , &lt;b&gt; Git &lt;/b&gt; , and &lt;b&gt; CI/CD pipelines &lt;/b&gt; for ServiceNow. &lt;/li&gt; &lt;li&gt; Understanding of &lt;b&gt; customer engagement analytics &lt;/b&gt; and &lt;b&gt; performance metrics &lt;/b&gt; . 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Soft Skills &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Excellent communication and teamwork abilities. &lt;/li&gt; &lt;li&gt; Strong documentation and technical writing skills. &lt;/li&gt; &lt;li&gt; Detail-oriented with a focus on quality and performance. &lt;/li&gt; &lt;li&gt; Ability to manage multiple priorities and adapt to changing requirements. &lt;/li&gt; &lt;li&gt; Self-motivated, proactive, and collaborative. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;strong&gt; &lt;/strong&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;DXC Technology&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811419/servicenow-csm-professional-at-dxc-technology/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811419/servicenow-csm-professional-at-dxc-technology/</link>
  <title>[Full Time] ServiceNow CSM Professional at DXC Technology</title>
  <dc:date>Mon, 09 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812013/advisor-systems-security-analysis-ibm-datapower-at-fis/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt; &lt;span&gt; What does a successful Senior API Management Architect/Engineer do at Fiserv &lt;/span&gt; &lt;/b&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; The Senior API Management Architect/Engineer is crucial in shaping our global API Management and integration environment. In this role, you will collaborate with development teams to design Fiservs API integration services platform. You support enterprise solutions with extensive experience in API Management and oversee the complete lifecycle of API proxies and API onboarding. A successful candidate ensures well-designed APIs adhering to platform standards, promoting code reuse and collaboration. You are also expected to have Production Support, Incident handling experience and perform regular Operational activities. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; What will you do: &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Plan and deliver API management solutions using platforms like IBM Datapower, Apigee Edge, Apigee X, and Hybrid. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Design, develop, and manage IBM DataPower services (multi-protocol gateways, web service proxies, XML firewalls). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Implement API layer requirements like security, custom analytics, throttling, caching, logging, and request/response modifications. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Build microservices systems using Java, Spring, or Node.js to support API management solutions. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Design API specifications and develop proxies, implementing custom policies using Java, JavaScript, Node.js, Python, and other languages. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Configure the Apigee platform, including keystores, trust stores, and KVMs. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ensure platform health, manage SLAs, and work with QA teams to implement API testing automation. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Lead day-to-day operational activities including Certificate management, Key rotation, environment maintenance automating regular Operational tasks using Scripts (Python, shell, etc.). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Provide L2/3 production support for API platforms and associated components Manage resolve incidents, outages performance issues in a timely and structured manner. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Participate in the weekly 12*7 on-call rotation (as applicable within the team) ensuring SLAs and availability targets are met. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; What you will need to have: &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; 6+ years of firsthand development experience with IBM DataPower, Google Apigee platforms like Apigee X, Hybrid, or Edge including proxy/service development, deployments, and configuration. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; 4+ years of direct experience with major programming and scripting languages like Python/ JavaScript. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; 2+ years of experience in developing microservices. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Bachelor&apos;&apos;s degree in a technical field (e.g., Engineering, Computer Science) or equivalent military experience. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;b&gt; &lt;span&gt; What would be great to have: &lt;/span&gt; &lt;/b&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Cloud developer or cloud architect certification in Azure, AWS, or Google Cloud Platform. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with API testing and automation test frameworks. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Knowledge of Google Cloud Platform, or experience with cloud environments like Azure or AWS. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;span&gt; Thank you for considering employment with Fiserv.Please: &lt;/span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; Apply using your legal name &lt;/span&gt; &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; &lt;span&gt; Complete the step-by-step profile and attach your resume (either is acceptable, both are preferable). &lt;/span&gt; &lt;/span&gt; &amp;nbsp; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Fiserv&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812013/advisor-systems-security-analysis-ibm-datapower-at-fis/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812013/advisor-systems-security-analysis-ibm-datapower-at-fis/</link>
  <title>[Full Time] Advisor, Systems Security Analysis (IBM DataPower) at FIS</title>
  <dc:date>Fri, 06 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810037/mim-professional-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; Bachelors degree in Computer Science or similar field; or equivalent work experience &lt;/div&gt; &lt;div&gt; 4-7 years of relevant experience required. &lt;/div&gt; &lt;div&gt; Understanding of ITIL - Change, Incident, Problem, Knowledge Management &lt;/div&gt; &lt;div&gt; Experience working with a Server/NOC/Application support and troubleshooting &lt;/div&gt; &lt;div&gt; Strong analytic skills including ability to identify patterns and potential issues &lt;/div&gt; &lt;div&gt; Situation management and decision-making skills &lt;/div&gt; &lt;div&gt; Exceptional written and verbal communication skills. &lt;/div&gt; &lt;div&gt; Ability to multi-task and remain calm in critical situations &lt;/div&gt; &lt;div&gt; Detail oriented &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; Preferred Qualifications &lt;/div&gt; &lt;div&gt; Certifications preferred: ITIL Foundation Certification &lt;/div&gt; &lt;div&gt; Previous experience in a Critical/Major Incident Management role &lt;/div&gt; &lt;div&gt; Experience working with the following tools: MS Office, ServiceNow, Power BI, JIRA, Confluence &lt;/div&gt; &lt;div&gt; Experience with event correlation and interpretation, utilizing various monitoring and troubleshooting tools (Dynatrace, SumoLogic) &lt;/div&gt; &lt;div&gt; Solid understanding of business functional areas, systems, and capabilities &lt;/div&gt; &lt;div&gt; Solid understanding of basic infrastructure design, operation and technical troubleshooting &lt;/div&gt; &lt;div&gt; Experience supporting applications within the Financial Services industry (e.g. broker-dealer, asset management, insurance, etc.). 
&lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810037/mim-professional-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810037/mim-professional-at-zensar/</link>
  <title>[Full Time] MIM Professional at Zensar</title>
  <dc:date>Fri, 06 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809972/aws-sre-devops-professional-at-zensar/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; Your responsibilities will also extend to monitoring and troubleshooting issues, implementing best practices, and staying updated with the latest AWS services and features. &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Bachelors degree in Computer Science, Engineering, or a related field. &lt;/li&gt; &lt;li&gt; 3+ years of experience in AWS cloud operations and infrastructure management. &lt;/li&gt; &lt;li&gt; Strong knowledge of AWS services, including EC2, S3, Lambda, and RDS. &lt;/li&gt; &lt;li&gt; Experience with cloud automation and configuration management tools (e.g., Terraform, Ansible). &lt;/li&gt; &lt;li&gt; Proficiency in scripting languages (Python, Bash) for automation and troubleshooting. &lt;/li&gt; &lt;li&gt; Understanding of cloud security principles and best practices. &lt;/li&gt; &lt;li&gt; Excellent problem-solving and analytical skills. &lt;/li&gt; &lt;li&gt; Ability to work collaboratively in a cross-functional team environment. &lt;/li&gt; &lt;li&gt; Strong communication skills for effective collaboration and documentation. &lt;/li&gt; &lt;li&gt; Willingness to stay updated with emerging cloud technologies and trends. &lt;/li&gt; &lt;/ul&gt; &lt;ul&gt; &lt;li&gt; Manage and optimize AWS cloud infrastructure, including EC2, S3, and RDS instances. &lt;/li&gt; &lt;li&gt; Ensure high availability, scalability, and performance of cloud-based applications. &lt;/li&gt; &lt;li&gt; Monitor and troubleshoot system issues, identifying and resolving bottlenecks. &lt;/li&gt; &lt;li&gt; Implement security measures and best practices to protect data and systems. &lt;/li&gt; &lt;li&gt; Collaborate with development teams to provide technical support and guidance. &lt;/li&gt; &lt;li&gt; Automate deployment processes and infrastructure provisioning using AWS tools. &lt;/li&gt; &lt;li&gt; Conduct regular performance reviews and optimize resource utilization. 
&lt;/li&gt; &lt;li&gt; Stay updated with AWS service updates and recommend new features for implementation. &lt;/li&gt; &lt;li&gt; Document and maintain knowledge base articles for internal reference. &lt;/li&gt; &lt;li&gt; Ensure compliance with industry standards and regulations for cloud operations. &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Zensar&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809972/aws-sre-devops-professional-at-zensar/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809972/aws-sre-devops-professional-at-zensar/</link>
  <title>[Full Time] Aws Sre Devops Professional at Zensar</title>
  <dc:date>Fri, 06 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809874/senior-software-engineer-at-allianz/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt; &lt;/strong&gt; &lt;p&gt; &lt;strong&gt;Overall Objectives of Job:&lt;/strong&gt; (If multiple sections, accord weightage to each section)&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Project Name:&lt;/strong&gt; GBCoC&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Work with architect(s) and development team to enhance the DevOps toolchain and build tools to facilitate operations and to ensure stability of the business solution&lt;/li&gt; &lt;li&gt;Defining and implementing the best practices for system configuration, monitoring, and performance optimization&lt;br&gt; &lt;/li&gt; &lt;li&gt;Actualize the DevOps processes for environment management including infrastructure, platform, and application lifecycle&lt;br&gt; &lt;/li&gt; &lt;li&gt;Maintain up-to-date documentation and participate actively in knowledge sharing activities like communities of practice, and/or other knowledge exchanges&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Weightage can be accorded to each section as follows:&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Managing existing infrastructure, platform, and application: 40%&lt;/li&gt; &lt;li&gt;Enhancing existing DevOps toolchain, building operational tools: 30%&lt;/li&gt; &lt;li&gt;Documentation and knowledge sharing: 15%&lt;/li&gt; &lt;li&gt;Collaboration and communication: 15%&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;100%&lt;/strong&gt; &lt;/p&gt; &lt;strong&gt;PART 3&lt;/strong&gt; &lt;p&gt; &lt;strong&gt;Qualification, Experience and Skills&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Bachelor&apos;s degree in Computer Science, Information Technology, or related field&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Open-minded, good communication and interpersonal skills&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Fast learner &amp;amp; curious&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;At least 2+ years of total relevant experience and hands-on experience in the below:&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Public cloud 
stack: AWS&lt;/li&gt; &lt;li&gt;Container technology: Docker, Kubernetes&lt;/li&gt; &lt;li&gt;Automation: Ansible, Terraform&lt;/li&gt; &lt;li&gt;CI/CD: GitHub Actions&lt;/li&gt; &lt;li&gt;Observability tool, e.g.: Prometheus, Grafana&lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Good experience with scripting/automation&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Has basic understanding in Agile software development framework (e.g. Scrum) and tools (e.g. JIRA)&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;strong&gt;Preferred/Additional Technical And Professional Expertise&lt;/strong&gt; &lt;ul&gt; &lt;li&gt;Working with change management of a production grade application&lt;/li&gt; &lt;li&gt;Programming knowledge and experience in any general-purpose programming language of min 1 year&lt;/li&gt; &lt;li&gt;Experience in any relational database operations of min 1 year&lt;/li&gt; &lt;li&gt;Good understanding of traditional enterprise technology stack and enterprise networking concepts&lt;/li&gt; &lt;li&gt;Experience/sound knowledge of managing any blockchain based solutions would be a big plus&lt;/li&gt; &lt;/ul&gt; &lt;strong&gt;PART 4&lt;/strong&gt; &lt;p&gt; &lt;strong&gt;Skills/Specific Tasks/Activities performed&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Work with architects and development teams to enhance DevOps toolchain9&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Manage the entire lifecycle of infrastructure, platforms, and application8&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Optimize performance monitoring and tuning8&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Build and maintain tools for operational stability and efficiency9&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Maintain up-to-date documentation for processes and configurations7&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Monitor system health using observability tools like Prometheus and Grafana7&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Support in regular release activities7&lt;/p&gt; 
&lt;/li&gt; &lt;/ul&gt; &lt;strong&gt;PART 5&lt;/strong&gt; &lt;p&gt; &lt;strong&gt;Key Competencies&lt;/strong&gt; &lt;/p&gt; &lt;p&gt; &lt;strong&gt;Area&lt;/strong&gt; &lt;/p&gt; &lt;p&gt; &lt;strong&gt;Competency&lt;/strong&gt;*&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Rate Skill Importance&lt;/strong&gt; 1=aware;2=basic; 3=intermediate/experienced;4=advanced; 5=expert&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Allianz India Skill catalogue reference&lt;/strong&gt; (for HR use only)&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Technical&lt;/strong&gt; (max of 5)&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Proficiency in AWS, Docker, Kubernetes, Ansible, Terraform, GitHub Actions, Prometheus, and Grafana&lt;/li&gt; &lt;li&gt;Scripting and automation skills&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;4&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Functional&lt;/strong&gt; (max of 5)&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Competence in performance optimization and troubleshooting&lt;/li&gt; &lt;li&gt;Ability to define and implement best practices&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;3&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Behaviours&lt;/strong&gt;*(max of 5)&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Open-minded and adaptable&lt;/li&gt; &lt;li&gt;Strong communication and interpersonal skills&lt;/li&gt; &lt;li&gt;Fast learner and curious&lt;/li&gt; &lt;li&gt;Proactive in knowledge sharing and documentation&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;4&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Productivity&lt;/strong&gt; (max of 3)&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Knowledge of Agile frameworks and tools like Jira &amp;amp; Confluence&lt;/li&gt; &lt;li&gt;Task orientation&lt;/li&gt; &lt;li&gt;Time Management&lt;/li&gt; &lt;/ul&gt; &lt;p&gt;3&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Communication&lt;/strong&gt; (max of 3)&lt;/p&gt; &lt;ul&gt; &lt;li&gt;Clear and concise in verbal and written communication&lt;/li&gt; &lt;li&gt;Effective in collaborating with cross-functional teams&lt;/li&gt; &lt;li&gt;Skilled in documentation and knowledge transfer&lt;/li&gt; &lt;/ul&gt; 
&lt;p&gt;4&lt;/p&gt; &lt;strong&gt;PART 6&lt;/strong&gt; &lt;p&gt; &lt;strong&gt;Authorities Held&lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt;To enhance the DevOps toolchain and build tools to facilitate operations and ensure stability of the business solution&lt;/li&gt; &lt;li&gt;Proactively identify possible technical issues, showstoppers, and possible causes with solutions to avoid/rectify them in the early stages itself&lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;53636 | IT &amp;amp; Tech Engineering | Professional | Non-Executive | Allianz Technology | Full-Time | Permanent&lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Locations - Trivandrum, KL, IN, 695581&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Allianz&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809874/senior-software-engineer-at-allianz/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809874/senior-software-engineer-at-allianz/</link>
  <title>[Full Time] Senior Software Engineer at Allianz</title>
  <dc:date>Fri, 06 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812347/reconciliation-system-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;TLM Smartstream application development and reconciliation automation for a large bank in Singapore. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;Responsibilities&lt;/b&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;Configure and manage reconciliation processes for TLM Smart stream application reconciliation automation &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Set up and maintain reconciliations between application external systems &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Coordinate work with users/project team to design, build, test and deliver the solutions &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Create and maintain documentation &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Execute the unit testing, system integration user acceptance testing &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Configuration/code management and maintenance of dev/test environment &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Follow the delivery governance model and regular updates to project Business stakeholders &lt;/li&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;Skills&lt;/b&gt;&lt;/div&gt;&lt;p&gt;&lt;b&gt;Must have&lt;/b&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;4-8 years of relevant experience with Reconciliation system. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Experience on any Reconciliation system, e.g. Gresham, Intellimatch, FIS (formerly SunGard), Fiserv Frontier Reconciliation, AutoRek, Duco, Broadridge, ReconArt &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Good knowledge of a RDBMS in particular at least one of Oracle, Sybase SQL Server. 
&lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Good knowledge of at least two operating systems including Windows, Unix, and/or LINUX &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Good solution configuration/ implementation skills including the ability to configure business applications using front end tools, database table updates, and data transformation tools (ETL). &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Experience in unit testing, system integration user acceptance testing &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Writing test case, test design and configuration/code management and maintenance of dev/test environment &lt;/li&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;/p&gt;&lt;li&gt;Good communication skills and stakeholder management &lt;/li&gt;&lt;/div&gt;&lt;/div&gt;&lt;p&gt;&lt;b&gt;Nice to have &lt;/b&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;TLM, Informatica power center &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;div&gt;&lt;b&gt;&lt;/b&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812347/reconciliation-system-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812347/reconciliation-system-developer-at-luxoft/</link>
  <title>[Full Time] Reconciliation System Developer at Luxoft</title>
  <dc:date>Fri, 06 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/813121/uft-consultant-at-opentext/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;The Opportunity:&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;The role is ideal for professionals with &lt;strong&gt;5 to 8 years of experience&lt;/strong&gt; who are passionate about &lt;strong&gt;functional and automation testing using UFT One&lt;/strong&gt;. You will work on &lt;strong&gt;SAP, desktop, web, and mobile automation projects&lt;/strong&gt;, contribute to building automation frameworks from scratch, and collaborate closely with &lt;strong&gt;business, testing, and development teams&lt;/strong&gt; in a dynamic and fast-paced environment.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;You Are Great At:&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Functional testing and &lt;strong&gt;test automation using UFT One&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Designing, developing, and executing &lt;strong&gt;test plans and test cases&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Automating &lt;strong&gt;SAP, desktop, and web applications&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Building automation frameworks from scratch:&lt;ul&gt;&lt;li&gt;Keyword Driven Framework&lt;/li&gt;&lt;li&gt;Modular Framework&lt;/li&gt;&lt;li&gt;BPT Framework&lt;/li&gt;&lt;/ul&gt;&lt;/li&gt;&lt;li&gt;Writing &lt;strong&gt;VB Scripts&lt;/strong&gt;, reusable functions, and performing &lt;strong&gt;Excel operations&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Using &lt;strong&gt;ALM / Jira&lt;/strong&gt; for test management&lt;/li&gt;&lt;li&gt;Installing, configuring, integrating, upgrading, and supporting &lt;strong&gt;UFT One&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Maintaining automation environments and test artifacts&lt;/li&gt;&lt;li&gt;Working independently while collaborating effectively with cross-functional teams&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;What It Takes:&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;5-8 years&lt;/strong&gt; of hands-on 
experience in automation and functional testing&lt;/li&gt;&lt;li&gt;Strong expertise in &lt;strong&gt;UFT One&lt;/strong&gt; (mandatory)&lt;/li&gt;&lt;li&gt;Experience with &lt;strong&gt;SAP automation&lt;/strong&gt; and &lt;strong&gt;desktop/web application testing&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Solid understanding of &lt;strong&gt;automation framework design&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Experience with &lt;strong&gt;UFT Digital Lab and mobile testing&lt;/strong&gt; (added advantage)&lt;/li&gt;&lt;li&gt;Proficiency in &lt;strong&gt;VB Scripting&lt;/strong&gt; and automation best practices&lt;/li&gt;&lt;li&gt;Strong exposure to &lt;strong&gt;test management tools&lt;/strong&gt; such as ALM and Jira&lt;/li&gt;&lt;li&gt;Excellent &lt;strong&gt;time management skills&lt;/strong&gt; with a proven ability to meet deadlines&lt;/li&gt;&lt;li&gt;Ability to work both &lt;strong&gt;independently and in a team-oriented environment&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Opentext&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/813121/uft-consultant-at-opentext/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/813121/uft-consultant-at-opentext/</link>
  <title>[Full Time] UFT Consultant at Opentext</title>
  <dc:date>Thu, 05 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809763/senior-software-engineer-_power-bi-developer-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt; &lt;li&gt; &lt;b&gt; Education Qualification: &lt;/b&gt; Bachelor&apos;&apos;s degree in computer science or related field or Exposure to any ERP platforms experience. &lt;br&gt; &lt;/li&gt;&lt;li&gt; Must-Have Skills: &lt;br&gt; &lt;/li&gt;&lt;li&gt; Develop and design new dashboards and visual reports using Power BI Desktop. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Deploy Power BI reports and dashboards using Power BI Service. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Write SQL queries and views in Snowflake database to import data as per data model requirements. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Conduct unit testing of all tools in the dashboard with respect to the database before deployment to the workspace. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Optimize the performance of Power BI dashboards and implement bug fixes. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Enhance and format Power BI dashboards to improve user experience. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Write DAX formulas/functions for complex calculated columns and measures in Power BI. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Understand ETL processes and perform data analysis for data visualization. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Deliver business user stories as per Agile Scrum methodology via Program Increment workflow. &lt;br&gt; &lt;/li&gt;&lt;li&gt; &lt;br&gt; &lt;/li&gt;&lt;li&gt; Required Skills &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Proven experience in developing visual reports and dashboards using Power BI Desktop. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Proficiency in SQL and experience with Snowflake database. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Experience with Power Automate &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Strong understanding of ETL processes and data analysis. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Experience in report builder for paginated reports &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Ability to write complex DAX formulas/functions. 
&lt;br&gt; &lt;/li&gt;&lt;li&gt; - Experience in performance optimization and bug fixing for Power BI dashboards. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Familiarity with Agile Scrum methodology and Program Increment workflow. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Excellent problem-solving skills and attention to detail. &lt;br&gt; &lt;/li&gt;&lt;li&gt; - Strong communication and teamwork skills. &lt;br&gt; &lt;/li&gt;&lt;li&gt; Life at CGI: &lt;br&gt; &lt;/li&gt;&lt;li&gt; It is rooted in ownership, teamwork, respect and belonging. Here, you&apos;ll reach your full potential because &lt;br&gt; &lt;/li&gt;&lt;li&gt; You are invited to be an owner from day 1 as we work together to bring our Dream to life. That&apos;s why we call ourselves CGI Partners rather than employees. We benefit from our collective success and actively shape our company&apos;s strategy and direction &lt;br&gt; &lt;/li&gt;&lt;li&gt; Your work creates value. You&apos;ll develop innovative solutions and build relationships with teammates and clients while accessing global capabilities to scale your ideas, embrace new opportunities, and benefit from expansive industry and technology expertise &lt;br&gt; &lt;/li&gt;&lt;li&gt; You&apos;ll shape your career by joining a company built to grow and last. 
You&apos;ll be supported by leaders who care about your health and well-being and provide you with opportunities to deepen your skills and broaden your horizons &lt;br&gt; &lt;/li&gt;&lt;li&gt; Come join our team, one of the largest IT and business consulting services firms in the world &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Skills: &lt;/b&gt; &lt;ul&gt; &lt;li&gt; Data Analysis &lt;/li&gt; &lt;li&gt; Data Engineering &lt;/li&gt; &lt;li&gt; SQLite &lt;/li&gt; &lt;li&gt; Analytical Thinking &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;/b&gt; &lt;/div&gt; &lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809763/senior-software-engineer-_power-bi-developer-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809763/senior-software-engineer-_power-bi-developer-at-cgi/</link>
  <title>[Full Time] Senior Software Engineer _Power BI Developer at CGI</title>
  <dc:date>Thu, 05 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809783/ibm-isam-specialist-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Primary Skills:&lt;/strong&gt; ISAM &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Job Summary:&lt;/strong&gt; We are seeking an experienced IBM ISAM Specialist to design, implement, and support IBM Security Access Manager (ISAM) solutions for securing enterprise applications and systems. The ideal candidate will have a strong background in configuring authentication and authorization policies, enabling Single Sign-On (SSO), and integrating ISAM with various identity providers and applications.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Deploy and configure IBM Security Access Manager (ISAM) components including WebSEAL, Policy Server, and Federation modules.&lt;/li&gt;&lt;li&gt;Set up reverse proxy instances, junctions, ACLs, and security policies.&lt;/li&gt;&lt;li&gt;Implement Single Sign-On (SSO) using SAML, OAuth, and OpenID Connect.&lt;/li&gt;&lt;li&gt;Configure Multi-Factor Authentication (MFA) and adaptive access controls.&lt;/li&gt;&lt;li&gt;Integrate ISAM with LDAP directories (Active Directory, IBM Security Directory Server, Oracle Directory, etc.).&lt;/li&gt;&lt;li&gt;Work with application servers (WebSphere, WebLogic) and J2EE-based applications.&lt;/li&gt;&lt;li&gt;Ensure secure transmission using SSL/TLS certificates and manage digital certificate renewals&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;7+ years of hands-on experience with IBM ISAM and related modules.&lt;/li&gt;&lt;li&gt;In-depth knowledge of authentication and authorization protocols (SAML, OAuth, OIDC, Kerberos).&lt;/li&gt;&lt;li&gt;Experience with LDAP directories and directory integration.&lt;/li&gt;&lt;li&gt;Familiarity with PKI, SSL/TLS, and digital certificates.&lt;/li&gt;&lt;li&gt;Proficiency in Java/J2EE, scripting (Shell, Python), and automation tools (Ansible 
preferred).&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred Skills:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;3+ years of hands-on experience with RBAC, compliance solutions, and access certification.&lt;/li&gt;&lt;li&gt;Ability to analyze complex problems under pressure and devise effective solutions.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Qualifications:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelor&apos;s degree in Computer Science, Information Technology, or a related field.&lt;/li&gt;&lt;li&gt;Relevant certifications in security and access management are a plus.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809783/ibm-isam-specialist-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809783/ibm-isam-specialist-at-virtusa/</link>
  <title>[Full Time] IBM ISAM Specialist at Virtusa</title>
  <dc:date>Thu, 05 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811945/python-developer-for-data-engineering-at-msci-services/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt;Overview&lt;/b&gt;&lt;/div&gt;&lt;div&gt; &lt;div&gt; &lt;p&gt;The Data Engineering group within MSCI is responsible for delivering data products to MSCI&apos;s product lines globally. We&apos;re building a next-generation metadata-driven data platform that uses AI to automate and scale data onboarding. As part of a talented software development team in Mumbai, you&apos;ll have the opportunity to design and build state-of-the-art automation solutions that transform how MSCI ingests, governs, and delivers data to the investment community. This is a greenfield opportunity to architect systems that will reduce manual effort by 40-60% and accelerate vendor onboarding by 50-70%.&lt;/p&gt; &lt;/div&gt;&lt;/div&gt;&lt;div&gt; &lt;div&gt; &lt;/div&gt;&lt;/div&gt;&lt;div&gt; &lt;b&gt;Responsibilities&lt;/b&gt;&lt;/div&gt;&lt;ul&gt; &lt;li&gt;Design and develop AI-powered automation capabilities for data onboarding, including vendor file classification, metadata auto-suggestion, data profiling engines, and automated quality control frameworks&lt;/li&gt; &lt;li&gt;Build LLM-integrated code generation systems for ingestion pipelines, PySpark transformations, and Airflow DAG orchestration&lt;/li&gt; &lt;li&gt;Implement metadata management platforms that serve as the control plane for data lifecycle automation&lt;/li&gt; &lt;li&gt;Develop RESTful APIs and integration layers connecting AI services, data platforms (Snowflake, Databricks), and orchestration frameworks&lt;/li&gt; &lt;li&gt;Create human-in-the-loop workflows for validation, exception handling, and continuous model improvement&lt;/li&gt; &lt;li&gt;Collaborate with cross-functional teams across data engineering, governance, and product to deliver end-to-end automation solutions&lt;/li&gt; &lt;li&gt;Build scalable, fault-tolerant systems designed for metadata processing at scale&lt;/li&gt;&lt;/ul&gt;&lt;div&gt; 
&lt;b&gt;Qualifications&lt;/b&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Required:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;5-8 years of software development experience with strong Python programming expertise&lt;/li&gt; &lt;li&gt;Knowledge of data manipulation libraries (Pandas, Polars) and analysis workflows&lt;/li&gt; &lt;li&gt;Proficiency in SQL and data querying across modern data platforms&lt;/li&gt; &lt;li&gt;Understanding of columnar storage formats and time-series analytics (ClickHouse, Parquet, Iceberg)&lt;/li&gt; &lt;li&gt;Experience with AI-assisted development tools (GitHub Copilot, Cursor, or similar)&lt;/li&gt; &lt;li&gt;Strong understanding of RESTful API design and implementation&lt;/li&gt; &lt;li&gt;Experience with Git version control and collaborative development workflows&lt;/li&gt; &lt;li&gt;Demonstrated ability to take ownership of complex technical solutions end-to-end&lt;/li&gt; &lt;li&gt;Strong analytical and problem-solving skills with attention to data quality and reliability&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Preferred:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Hands-on experience with LLMs (OpenAI, Anthropic, or open-source models) and prompt engineering&lt;/li&gt; &lt;li&gt;Familiarity with Agentic AI frameworks and orchestration patterns&lt;/li&gt; &lt;li&gt;Experience with cloud platforms (Azure or GCP) and modern data infrastructure&lt;/li&gt; &lt;li&gt;Exposure to financial concepts, reference data, or market data processing&lt;/li&gt; &lt;li&gt;Experience building metadata-driven or self-service data platforms&lt;/li&gt;&lt;/ul&gt;&lt;div&gt; &lt;b&gt;&lt;/b&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;What we offer you&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Transparent compensation schemes and comprehensive employee benefits, tailored to your location, ensuring your financial security, health, and overall wellbeing.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;Flexible working arrangements, advanced technology, and 
collaborative workspaces.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;A culture of high performance and innovation where we experiment with new ideas and take responsibility for achieving results.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;A global network of talented colleagues, who inspire, support, and share their expertise to innovate and deliver for our clients.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;Global Orientation program to kickstart your journey, followed by access to our Learning@MSCI platform, AI Learning Center, LinkedIn Learning Pro and tailored learning opportunities for ongoing skills development.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;Multi-directional career paths that offer professional growth and development through new challenges, internal mobility and expanded roles.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt; &lt;li&gt;We actively nurture an environment that builds a sense of inclusion, belonging and connection, including eight Employee Resource Groups. All Abilities, Asian Support Network, Black Leadership Network, Climate Action Network, Hola! MSCI, Pride &amp;amp; Allies, Women in Tech, and Women&apos;s Leadership Forum.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;At MSCI we are passionate about what we do, and we are inspired by our vision — to power better decisions. You&apos;ll be part of an industry-leading network of creative, curious, and entrepreneurial pioneers. This is a space where you can challenge yourself, set new standards and perform beyond expectations for yourself, our clients, and our industry.&lt;/p&gt;&lt;p&gt;MSCI strengthens global markets by connecting participants across the financial ecosystem with a common language. Our research-based data, analytics and indexes, supported by advanced technology, set standards for global investors and help our clients understand risks and opportunities so they can make better decisions and unlock innovation. 
We serve asset managers and owners, private-market sponsors and investors, hedge funds, wealth managers, banks, insurers and corporates.&lt;/p&gt;&lt;p&gt;MSCI Inc. is an equal opportunity employer. It is the policy of the firm to ensure equal employment opportunity without discrimination or harassment on the basis of race, color, religion, creed, age, sex, gender, gender identity, sexual orientation, national origin, citizenship, disability, marital and civil partnership/union status, pregnancy (including unlawful discrimination on the basis of a legally protected parental leave), veteran status, or any other characteristic protected by law. MSCI is also committed to working with and providing reasonable accommodations to individuals with disabilities. If you are an individual with a disability and would like to request a reasonable accommodation for any part of the application process, please email Disability.Assistance@msci.com and indicate the specifics of the assistance needed. Please note, this e-mail is intended only for individuals who are requesting a reasonable workplace accommodation; it is not intended for other inquiries.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;To all recruitment agencies&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;MSCI does not accept unsolicited CVs/Resumes. Please do not forward CVs/Resumes to any MSCI employee, location, or website. MSCI is not responsible for any fees related to unsolicited CVs/Resumes.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Note on recruitment scams&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;We are aware of recruitment scams where fraudsters impersonating MSCI personnel may try and elicit personal information from job seekers. 
Read our full note on careers.msci.com&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;br /&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br /&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br /&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;br /&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br /&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;MSCI Services&lt;br /&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;br /&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811945/python-developer-for-data-engineering-at-msci-services/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811945/python-developer-for-data-engineering-at-msci-services/</link>
  <title>[Full Time] Python Developer For Data Engineering at MSCI Services</title>
  <dc:date>Wed, 04 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809817/ey-gds-consulting-ai-and-data-data-architect-hadoop-manager-at-ey/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt;The opportunity&lt;/strong&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Were looking Senior Data Architect with 12+ years of progressive experience in data engineering, data warehousing, or big data roles, with at least 5 years focused specifically on data architecture. The candidate will be a technical expert in the modern big data ecosystem, with proven mastery of Hadoop, Hive, Spark, and Apache Iceberg. Candidate will be responsible for defining our strategic data architecture, setting technical standards, and leading the implementation of robust, scalable, and efficient data solutions.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Your key responsibilities&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Work as a project manager to Lead the design and evolution of our large-scale data lakehouse architecture, ensuring it is scalable, reliable, and cost-effective.&lt;/li&gt;&lt;li&gt;Provide technical leadership and mentorship to data engineers and analysts. Collaborate closely with Software Engineers, and business stakeholders to understand requirements and deliver effective data solutions.&lt;/li&gt;&lt;li&gt;Experience in data modelling, data mapping, data profiling and meta data management.&lt;/li&gt;&lt;li&gt;Architect and tune high-performance data processing pipelines. 
Identify and resolve complex performance issues in distributed computing environments involving Spark execution, Hive query optimization, and Iceberg metadata management.&lt;/li&gt;&lt;li&gt;Expert Experience on Hadoop (HDFS, YARN), Hive (including LLAP, Tez), Spark (Structured Streaming, Spark SQL), and Apache Iceberg.&lt;/li&gt;&lt;li&gt;Expert-level proficiency in building and optimizing large-scale data processing pipelines using Spark (PySpark/Scala).&lt;/li&gt;&lt;li&gt;Deep understanding of Spark internals, execution plans, and tuning.&lt;/li&gt;&lt;li&gt;Extensive experience in writing, optimizing, and managing HiveQL scripts. Deep knowledge of Hive architecture, file formats (ORC, Parquet), and performance tuning.&lt;/li&gt;&lt;li&gt;Strong, hands-on experience with the core Hadoop ecosystem (HDFS, YARN, MapReduce). Understanding of cluster management and fundamentals.&lt;/li&gt;&lt;li&gt;Hands-on experience designing and implementing data lakes using Apache Iceberg as the table format. 
Must understand features like schema evolution, hidden partitioning, time travel, and performance benefits over Hive tables.&lt;/li&gt;&lt;li&gt;Experience in either Python (PySpark) or Scala.&lt;/li&gt;&lt;li&gt;Mastery of SQL and experience optimizing complex queries on massive datasets.&lt;/li&gt;&lt;li&gt;Experience with at least one major cloud platform (AWS (EMR, S3, Glue), Azure (Databricks, ADLS, Synapse), or GCP (Dataproc, BigQuery, GCS)).&lt;/li&gt;&lt;li&gt;Interface and communicate with the onsite teams directly to understand the requirement and determine the optimum solutions.&lt;/li&gt;&lt;li&gt;Create technical solutions as per business needs by translating their requirements and finding innovative solution options.&lt;/li&gt;&lt;li&gt;Lead and mentor a team throughout design, development and delivery phases and keep the team intact on high pressure situations.&lt;/li&gt;&lt;li&gt;Get involved in business development activities like creating proof of concepts (POCs), point of views (POVs), assist in proposal writing and service offering development, and capable of developing creative power point content for presentations.&lt;/li&gt;&lt;li&gt;Create and maintain detailed architecture diagrams, data flow maps, and other technical documentation.&lt;/li&gt;&lt;li&gt;Participate in organization-level initiatives and operational activities.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Skills and attributes for success&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Use an issue-based approach to deliver growth, market and portfolio strategy engagements for corporates&lt;/li&gt;&lt;li&gt;Strong communication, presentation and team building skills and experience in producing high quality reports, papers, and presentations.&lt;/li&gt;&lt;li&gt;Experience in executing and managing research and analysis of companies and markets, preferably from a commercial due diligence 
standpoint.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Ideally, youll also have&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;8-10 years of experience in Banking and capital markets sector preferred&lt;/li&gt;&lt;li&gt;Cloud architect certifications&lt;/li&gt;&lt;li&gt;Experience using Agile methodologies.&lt;/li&gt;&lt;li&gt;Experience with real-time stream processing technologies (Kafka, Flink, Spark Streaming).&lt;/li&gt;&lt;li&gt;Experience with containerization and orchestration tools (Docker, Kubernetes).&lt;/li&gt;&lt;li&gt;Experience with DevOps/DataOps principles and CI/CD pipelines for data projects.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;What we look for&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;A Team of people with commercial acumen, technical experience and enthusiasm to learn new things in this fast-moving environment&lt;/li&gt;&lt;li&gt;An opportunity to be a part of market-leading, multi-disciplinary team of 1400 + professionals, in the only integrated global transaction business worldwide.&lt;/li&gt;&lt;li&gt;Opportunities to work with EY Advisory practices globally with leading businesses across a range of industries&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;EY&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809817/ey-gds-consulting-ai-and-data-data-architect-hadoop-manager-at-ey/&quot;&gt;Apply&lt;/a&gt;&lt;br 
/&gt;</description>
  <link>https://ineojobs.com/job/809817/ey-gds-consulting-ai-and-data-data-architect-hadoop-manager-at-ey/</link>
  <title>[Full Time] Ey-gds Consulting-ai And Data-data Architect-hadoop-manager at EY</title>
  <dc:date>Wed, 04 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809848/data-architect-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;Position Description:&lt;/b&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Qualification&lt;/strong&gt;: &lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelors or Masters degree in Computer Science, Engineering, or a related field&lt;/li&gt;&lt;li&gt;We are seeking a skilled Data Architect with 10+ years of hands-on experience in managing enterprise databases and supporting critical application deployments&lt;/li&gt;&lt;li&gt;The role requires strong expertise in data migration, database object creation (tables, indexes, procedures), performance tuning, and deployment scripting.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Perform data migration across environments and platforms&lt;/li&gt;&lt;li&gt;Develop and maintain conceptual, logical, and physical data models to support business and technical requirements&lt;/li&gt;&lt;li&gt;Analyse existing systems and translate business needs into data structures and relationships&lt;/li&gt;&lt;li&gt;Define data standards, naming conventions, and metadata for consistency across the organization&lt;/li&gt;&lt;li&gt;Ensure models support performance, scalability, and integration requirements&lt;/li&gt;&lt;li&gt;Design and create database objects such as tables, indexes, stored procedures, and views&lt;/li&gt;&lt;li&gt;Optimize queries and perform performance tuning to ensure efficient database operations&lt;/li&gt;&lt;li&gt;Prepare and execute deployment scripts for releases and environment refreshes&lt;/li&gt;&lt;li&gt;Monitor database health, troubleshoot issues, and ensure availability and reliability&lt;/li&gt;&lt;li&gt;Collaborate with development and DevOps teams to support application deployments&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Required Skills &lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong expertise in data modeling methodologies (3NF, dimensional/star schema, snowflake, etc)&lt;/li&gt;&lt;li&gt;Proficiency 
with data modeling tools (ERwin, ER/Studio, PowerDesigner, or similar)&lt;/li&gt;&lt;li&gt;Strong SQL knowledge and understanding of RDBMS concepts&lt;/li&gt;&lt;li&gt;Proven experience in data migration, performance tuning, and query optimization&lt;/li&gt;&lt;li&gt;Strong skills in SQL scripting and automation&lt;/li&gt;&lt;li&gt;Hands-on knowledge of backup/recovery tools and deployment processes&lt;/li&gt;&lt;li&gt;Good understanding of indexes, partitions, and database design principles&lt;/li&gt;&lt;li&gt;Excellent communication and problem-solving skills&lt;/li&gt;&lt;li&gt;Nice to Have Experience with CI/CD pipelines and DevOps-based database deployments&lt;/li&gt;&lt;li&gt;Exposure to cloud databases (AWS RDS, Azure SQL, GCP) is a plus.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Application Development&lt;/li&gt;&lt;li&gt;Applications Administration&lt;/li&gt;&lt;li&gt;Database&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809848/data-architect-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809848/data-architect-at-cgi/</link>
  <title>[Full Time] Data Architect at CGI</title>
  <dc:date>Wed, 04 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809807/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to gather requirements and provide insights that drive the development of robust data solutions, while also ensuring data integrity and accessibility across the organization. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing and mentoring within the team to enhance overall performance.&lt;/li&gt;&lt;li&gt;Evaluate and recommend new technologies and tools to improve data architecture and processes. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;b&gt;&lt;br&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt; Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809807/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809807/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 03 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809871/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to gather requirements and provide insights that drive the development of robust data solutions, while also ensuring data integrity and accessibility across the organization. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing and mentoring within the team to enhance overall performance.&lt;/li&gt;&lt;li&gt;Evaluate and recommend new technologies and tools to improve data architecture and processes. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;b&gt;&lt;br&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt; Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809871/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809871/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 03 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809839/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require a blend of analytical thinking and creative problem-solving to develop efficient data solutions that support the overall goals of the organization. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities and foster a culture of continuous improvement.&lt;/li&gt;&lt;li&gt;Develop and maintain comprehensive documentation of data architecture and design decisions to ensure clarity and consistency across projects. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;b&gt;&lt;br&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt; Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Microsoft Fabric, Microsoft Azure Databricks, Microsoft SQL Server.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809839/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809839/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 03 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809814/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Microsoft SQL Server, Microsoft Azure Databricks, Microsoft Fabric&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will require a blend of analytical thinking and creative problem-solving to develop efficient data solutions that support the overall goals of the organization. &lt;b&gt;Roles Responsibilities:&lt;/b&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities and foster a culture of continuous improvement.&lt;/li&gt;&lt;li&gt;Develop and maintain comprehensive documentation of data architecture and design decisions to ensure clarity and consistency across projects. 
&lt;b&gt;Professional Technical Skills:&lt;/b&gt; &lt;b&gt;&lt;br&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt; Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Microsoft Fabric, Microsoft Azure Databricks, Microsoft SQL Server.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809814/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809814/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Tue, 03 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809869/eim-data-architect-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;Primary Skills&lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;li&gt;Data Architecture&lt;/li&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Secondary Skills&lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;li&gt;SQL&lt;/li&gt;&lt;li&gt;SAP&lt;/li&gt;&lt;li&gt;Informatica&lt;/li&gt;&lt;/div&gt;&lt;p&gt;We are seeking an experienced Data Architect to design, implement, and maintain scalable data solutions that enable advanced analytics and business intelligence. The ideal candidate will have strong expertise in data modeling, integration, and governance, with additional experience in Informatica and SAP environments being a significant advantage.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Responsibility&lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;li&gt;Minimum 10+ years of IT experience.&lt;/li&gt;&lt;li&gt;Design and maintain the organizations data architecture, ensuring alignment with business goals and technology strategy&lt;/li&gt;&lt;li&gt;Develop data models, data flow diagrams, and metadata documentation for enterprise systems.&lt;/li&gt;&lt;li&gt;Define and implement data integration strategies across on premise and cloud platforms.&lt;/li&gt;&lt;li&gt;Work closely with business and technical teams to translate requirements into scalable data solutions.&lt;/li&gt;&lt;li&gt;Collaborate with cross functional teams to integrate SAP data into enterprise data platforms.&lt;/li&gt;&lt;li&gt;Data Profiling, Data relationships, Data Modelling, Data Lineage, Impact Analysis, Design. 
&lt;/li&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;br /&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br /&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br /&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;br /&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br /&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;br /&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;br /&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809869/eim-data-architect-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809869/eim-data-architect-at-virtusa/</link>
  <title>[Full Time] EIM Data Architect at Virtusa</title>
  <dc:date>Tue, 03 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/812281/datawarehouse-architect-at-icici-bank/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Essential Services: Role &amp;amp; Location fungibility&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;The role descriptions give you an overview of the responsibilities; it is only directional and guiding in nature. At ICICI Bank, we believe in serving our customers beyond our role definition, product boundaries, and domain limitations through our philosophy of customer 360-degree. In essence, this captures our belief in serving the entire banking needs of our customers as One Bank, One Team. To achieve this, employees at ICICI Bank are expected to be role and location-fungible with the understanding that Banking is an essential service.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;About the Role:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;As a Data Warehouse Architect, you will be responsible for managing and enhancing data warehouse that manages large volume of customer-life cycle data flowing in from various applications within guardrails of risk and compliance.&lt;strong&gt;&lt;/strong&gt;You will be managing the day-to-day operations of data warehouse i.e. Vertica. In this role responsibility, you will manage a team of data warehouse engineers to develop data modelling, designing ETL data pipeline, issue management, upgrades, performance fine-tuning, migration, governance and security framework of the data warehouse. This role enables the Bank to maintain huge data sets in a structured manner that is amenable for data intelligence. The data warehouse supports numerous information systems used by various business groups to derive insights.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;As a natural progression, the data warehouses will be gradually migrated to Data Lake enabling better analytical advantage. 
The role holder will also be responsible for guiding the team towards this migration.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Key Responsibilities:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Data Pipeline Design:&lt;/strong&gt; Responsible for designing and developing ETL data pipelines that can help in organising large volumes of data. Use of data warehousing technologies to ensure that the data warehouse is efficient, scalable, and secure.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Issue Management&lt;/strong&gt;: Responsible for ensuring that the data warehouse is running smoothly. Monitor system performance, diagnose and troubleshoot issues,&lt;strong&gt;&lt;/strong&gt;and make necessary changes to optimize system performance.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Collaboration:&lt;/strong&gt; Collaborate with cross-functional teams to implement upgrades, migrations and continuous improvements.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Integration and Processing&lt;/strong&gt;: Responsible for processing, cleaning, and integrating large data sets from various sources to ensure that the data is accurate, complete, and consistent.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Data Modelling&lt;/strong&gt;: Responsible for designing and implementing data modelling solutions to ensure that the organizations data is properly structured and organized for analysis.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&lt;span&gt;Key Qualifications &amp;amp; Skills:&lt;/span&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;&lt;strong&gt;Education Qualification:&lt;/strong&gt; B.E./B. Tech. 
in Computer Science, Information Technology or equivalent domain with 10 to 12 years of experience and at least 5 years of relevant work experience in Datawarehouse/mining/BI/MIS.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Experience in Data Warehousing:&lt;/strong&gt; Knowledge on ETL and data technologies and outline future vision in OLTP, OLAP (Oracle / MSSQL). Data Modelling, Data Analysis and Visualization experience (Analytical tools experience like Power BI / SAS / QlikView / Tableau etc). Good to have exposure to Azure Cloud Data platform services like COSMOS, Azure Data Lake, Azure Synapse, and Azure Data factory.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Synergize with the Team:&lt;/strong&gt; Regular interaction with business/product/functional teams to create mobility solutions.&lt;/li&gt;&lt;li&gt;&lt;strong&gt;Certification:&lt;/strong&gt; Azure certified DP 900, PL 300, DP 203 or any other Data platform/Data Analyst certifications.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;ICICI Bank&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/812281/datawarehouse-architect-at-icici-bank/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/812281/datawarehouse-architect-at-icici-bank/</link>
  <title>[Full Time] Datawarehouse Architect at ICICI Bank</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809830/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;Good to have skills :&lt;/b&gt;Python (Programming Language), Data Engineering, Microsoft Power Business Intelligence (BI)&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:&lt;br&gt;As a Data Architect, you will define the data requirements and structure for the application. A typical day involves collaborating with various teams to model and design the application data structure, ensuring optimal storage and integration solutions are in place. You will engage in discussions to align data strategies with business objectives, while also addressing any challenges that arise in the data architecture process. Your role will require a keen understanding of data flows and the ability to translate complex requirements into actionable designs, fostering a collaborative environment that encourages innovation and efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;&lt;br&gt;&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;&lt;br&gt;&lt;b&gt;Must To Have Skills:&lt;/b&gt;&lt;br&gt;Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have Skills:Experience with Data Engineering, Python (Programming Language), Microsoft Power Business Intelligence (BI).&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures. &lt;b&gt;Additional Information:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;Qualification&lt;/b&gt;&lt;br&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809830/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809830/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810206/python-django-lead-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;span&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Job Overview&lt;/strong&gt;:&lt;/span&gt;&lt;span&gt; We are seeking an experienced Django Backend Lead Developer to join our team. The ideal candidate will have a strong background in backend development, cloud technologies, and big data processing. This role involves leading technical projects, mentoring junior developers, and ensuring the delivery of high-quality solutions.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;:&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Lead the development of backend micro services using Django.&lt;/li&gt;&lt;li&gt;Design and implement scalable and secure APIs.&lt;/li&gt;&lt;li&gt;Integrate Azure Cloud services for application deployment and management.&lt;/li&gt;&lt;li&gt;Utilize Azure Databricks for big data processing and analytics.&lt;/li&gt;&lt;li&gt;Implement data processing pipelines using PySpark.&lt;/li&gt;&lt;li&gt;Collaborate with front-end developers, product managers, and other stakeholders to deliver comprehensive solutions.&lt;/li&gt;&lt;li&gt;Conduct code reviews and ensure adherence to best practices.&lt;/li&gt;&lt;li&gt;Mentor and guide junior developers.&lt;/li&gt;&lt;li&gt;Optimize database performance and manage data storage solutions.&lt;/li&gt;&lt;li&gt;Ensure high performance and security standards for applications.&lt;/li&gt;&lt;li&gt;Participate in architecture design and technical decision-making.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Qualifications&lt;/strong&gt;:&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Bachelors degree in Computer Science, Information Technology, or a related field.&lt;/li&gt;&lt;li&gt;8+ years of experience in backend development.&lt;/li&gt;&lt;li&gt;8+ years of experience with Django.&lt;/li&gt;&lt;li&gt;Proven experience with Azure Cloud services.&lt;/li&gt;&lt;li&gt;Experience with Azure 
Databricks and PySpark.&lt;/li&gt;&lt;li&gt;Strong understanding of RESTful APIs and web services.&lt;/li&gt;&lt;li&gt;Excellent communication and problem-solving skills.&lt;/li&gt;&lt;li&gt;Familiarity with Agile methodologies.&lt;/li&gt;&lt;li&gt;Experience with database management (SQL and NoSQL).&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;Skills&lt;/strong&gt;: &lt;/span&gt;&lt;span&gt;Django, Python, Azure Cloud, Azure Databricks, Delta Lake and Delta tables, PySpark, SQL/NoSQL databases, RESTful APIs, Git, and Agile methodologies&lt;/span&gt;&lt;/p&gt;&lt;div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;/div&gt;&lt;/span&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810206/python-django-lead-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810206/python-django-lead-at-infobeans/</link>
  <title>[Full Time] Python/Django Lead at Infobeans</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810282/senior-software-analyst-at-softtek/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Job Description for Senior Software Analyst at SOFTTEK INDIA PRIVATE LIMITED: We are seeking a skilled Senior Software Analyst to join our dynamic team at SOFTTEK INDIA PRIVATE LIMITED. The ideal candidate will have a strong background in software development and analysis, capable of overseeing and improving software systems to maximize efficiency and effectiveness. You will work closely with stakeholders to gather requirements, design solutions, and ensure that software projects meet business objectives. Key Responsibilities:&lt;br&gt; - Analyze existing software systems and processes, identifying areas for improvement and optimization.&lt;br&gt; - Collaborate with cross-functional teams, including developers, project managers, and business analysts, to gather requirements and translate them into technical specifications.&lt;br&gt; - Design and implement software solutions, ensuring alignment with best practices and company standards.&lt;br&gt; - Conduct rigorous testing of software applications to ensure quality and functionality before deployment.&lt;br&gt; - Provide technical support and guidance to junior team members, fostering knowledge sharing and professional development.&lt;br&gt; - Stay updated with the latest industry trends and technologies to advance the companys software capabilities. 
Skills and Tools Required:&lt;br&gt; - Proficiency in programming languages such as Java, C#, or Python.&lt;br&gt; - Strong experience with databases such as SQL Server, Oracle, or MySQL.&lt;br&gt; - Knowledge of software development methodologies, including Agile and Scrum.&lt;br&gt; - Familiarity with version control systems like Git.&lt;br&gt; - Excellent analytical and problem-solving skills.&lt;br&gt; - Strong communication and interpersonal skills to collaborate effectively with various stakeholders.&lt;br&gt; - Experience with software testing tools and frameworks is a plus.&lt;br&gt; - Knowledge of cloud platforms, such as AWS or Azure, is an advantage. Join our innovative team at SOFTTEK INDIA PRIVATE LIMITED and contribute to exciting projects that make a difference in the tech landscape.&lt;/p&gt; &lt;br&gt;&lt;b&gt; Roles and Responsibilities&lt;/b&gt; &lt;br&gt;&lt;ul&gt; &lt;li&gt;Data Engineer:&lt;/li&gt; &lt;li&gt;4+ years of ETL experience&lt;/li&gt; &lt;li&gt;Clover is a must&lt;/li&gt; &lt;li&gt;Data Bricks is nice to have (b/c we aren&apos;t sure whether we&apos;ll get beyond Clover day to day)&lt;/li&gt; &lt;li&gt;Strong SQL experience&lt;/li&gt; &lt;li&gt;Strong API consumer experience&lt;/li&gt; &lt;li&gt;SOAP (at least a small amount of experience preferred)&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Softtek&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810282/senior-software-analyst-at-softtek/&quot;&gt;Apply&lt;/a&gt;&lt;br 
/&gt;</description>
  <link>https://ineojobs.com/job/810282/senior-software-analyst-at-softtek/</link>
  <title>[Full Time] Senior Software Analyst at Softtek</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809820/process-mining-platform-engineer-at-pepsico/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt; Overview &lt;/strong&gt; &lt;p&gt; &lt;span&gt; As a process mining platform engineer you will be responsible for catering to devsecops requirements from sector teams on the platform. Resolving platform incidents and fulfilling the service requests. This role will also involve platform engineering activities on ETL, administration and enabling of new capabilities on the platform. &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt; &lt;br&gt; &lt;strong&gt; Responsibilities &lt;/strong&gt; &lt;p&gt; &lt;span&gt; Experienced in data extraction, data modelling and dashboarding of process mining models on Celonis.,Experienced with agile development methodology (i.e. Safe Agile), Coordinate with process owners and business to understand the as-is process and the requirements for process mining .,Reports status, issues and risks to tech leads on a regular basis,Understand existing processes and facilitate change requirements as part of a change control process,Responsible for the requirement understanding, development, testing, implementation and maintenance of new and existing process mining models &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt; &lt;br&gt; &lt;strong&gt; Qualifications &lt;/strong&gt; &lt;p&gt; &lt;span&gt; Engineering Degree or PG &lt;/span&gt; &lt;/p&gt;&lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Beverage&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pepsico&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/809820/process-mining-platform-engineer-at-pepsico/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809820/process-mining-platform-engineer-at-pepsico/</link>
  <title>[Full Time] Process Mining Platform Engineer at Pepsico</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810144/oracle-fusion-erp-security-analyst-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;span&gt;We are seeking an Oracle FusionCloud ERP (ERP) functional support to help implementing key Financialinitiatives on the Oracle platform due to rapid growth in XXX legal entitiesand ledgers, including the formation of Bank Holding Company and GroupCompanies. The Oracle Fusion ERPSecurity Analyst role typically involves managing user access, roles,and security configurations within Oracle Cloud ERP systems, ensuring datasecurity, and compliance with relevant policies. The role often requiresa combination of technical and functional expertise, including experiencewith Oracle Fusion Cloud applications, security concepts like &lt;/span&gt;&lt;span&gt;and &lt;/span&gt;&lt;span&gt;, andsecurity auditing. Project Management across the full SDLC is a plus forthis position. The scope of the rolesupports financial business users in North America and EMEA locations.&lt;/span&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Position Responsibilities/Duties&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Provisioning and managing useraccounts, roles, and permissions based on business requirements. &lt;/li&gt;&lt;li&gt;Defining and implementingsecurity policies and access controls for Oracle Cloud applications. &lt;/li&gt;&lt;li&gt;Conducting security assessmentsof Oracle Cloud environments and applications to identify vulnerabilities andensure compliance. &lt;/li&gt;&lt;li&gt;Ensuring compliance withrelevant security standards, policies, and regulations (e.g., ,NYDFS, etc). &lt;/li&gt;&lt;li&gt;Responding to security incidents, coordinating with other teams to resolve issues, and implementingpreventative measures. &lt;/li&gt;&lt;li&gt;Designing and implementing security roles that align with business processes and adhere to leastprivilege principles. 
&lt;/li&gt;&lt;li&gt;Identifying and mitigating potential SOD conflicts within the system. Creating and maintaining documentation for security configurations, policies, and procedures. &lt;/li&gt;&lt;li&gt;Collaboratingwith business users, IT teams, and other stakeholders to address security-relatedneeds. &lt;/li&gt;&lt;li&gt;Ensuring that proposedsolutions comply with the companys technology direction. Ensuring compliance withcompanys change and security policies. &lt;/li&gt;&lt;li&gt;Working closely with Oracle applicationteam (internal and external) on security design and related applicationconfiguration.&lt;/li&gt;&lt;li&gt;Provide general technical support for Oracle Fusion Cloud ERP system and usertraining Late night production supportand weekend implementation work will be required End user requirements management (20%)&lt;/li&gt;&lt;li&gt;Designing, configuring, developing, testing, and improving Oracle Financialsplatform. (50%)Project Management (5%)Production support for ERP issues. 
(25%)&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Experience/Knowledge Requirements&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Demonstrated hands ontechnical understanding of Oracle Fusion Cloud ERP concepts and generalmodule functionality &lt;/li&gt;&lt;li&gt;Ability to clearly describeend-to-end business processes and the key integration points, workcross-functionally across different business processes within anorganization, and knowledge of challenges faced by implementation teams &lt;/li&gt;&lt;li&gt;Strong analytical, written,and verbal communication skills &lt;/li&gt;&lt;li&gt;Project Management skillsusing MS Project a plus &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Required Skills/Abilities&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Must have Strong understandingof Oracle Fusion Cloud security concepts, including the Security Console, and experience with relevant tools and technologies. &lt;/li&gt;&lt;li&gt;Severalyears of experience with Oracle Fusion Cloud applications, including experience with security configuration, role-based access control (RBAC), and segregation of duties (SOD). &lt;/li&gt;&lt;li&gt;Excellent communication skillswith the ability to drive consensus and absorb and present complex ideas in asuccinct and accurate manner. 
&lt;/li&gt;&lt;li&gt;Have a team oriented approach &lt;/li&gt;&lt;li&gt;Minimum 5 years of experiencein Oracle Fusion Cloud ERP experience is a must Experience in finance andaccounting industry a plus &lt;/li&gt;&lt;li&gt;Should have strong Englishcommunication and writing skills &lt;/li&gt;&lt;li&gt;Ability to work independently and as part of a team &lt;/li&gt;&lt;li&gt;Excellent troubleshooting andproblem solving abilities &lt;/li&gt;&lt;li&gt;Must be willing to work in anenergetic, fast paced and team-oriented development environment &lt;/li&gt;&lt;li&gt;Ability to manage multiplepriorities effectively is a necessity &lt;/li&gt;&lt;li&gt;Minimum 3+ year TOAD/SQL experience is a must &lt;/li&gt;&lt;li&gt;University Degree/Preferably in a field related to computer science/software engineering&lt;/li&gt;&lt;li&gt;Oracle Fusion Cloud certifications a plus&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Skills and Special Requirements (heavy lifting, excessive overtime, etc.)&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Must be willing to work some weekends and late nights to support production, upgrades,changes, and user support via on-call rotation.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: 
&lt;/b&gt;Infobeans&lt;br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810144/oracle-fusion-erp-security-analyst-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810144/oracle-fusion-erp-security-analyst-at-infobeans/</link>
  <title>[Full Time] Oracle Fusion ERP - Security Analyst at Infobeans</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810057/oracle-fusion-oci-engineer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;span&gt;An Oracle EBS &amp;amp; Cloud Senior Technical Consultant with expertise inintegrations, Oracle Integration Cloud (OIC) and EPM Automate Tool wouldtypically be responsible for the following:&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Designing PAAS solutions.&lt;/li&gt;&lt;li&gt;Designingand implementing integration solutions using Oracle Integration Cloud(OIC) and EPM Automate Tool for Oracle ERP Cloud.&lt;/li&gt;&lt;li&gt;Providingstrong Oracle ERP technical expertise in Oracle GL, AP and FAmodules/interfaces using PL/SQL and BIP reports. &lt;/li&gt;&lt;li&gt;Providingtechnical expertise and guidance on integration best practices, patternsand methodologies&lt;/li&gt;&lt;li&gt;Collaboratingwith other team members to understand requirements and design solutionsthat meet those needs&lt;/li&gt;&lt;li&gt;Troubleshootingand resolving technical issues related to integration solutions&lt;/li&gt;&lt;li&gt;Stayingup-to-date with new features and updates in Oracle EBS, Oracle IntegrationCloud (OIC) and other related technologies&lt;/li&gt;&lt;li&gt;Participatingin client meetings and presentations to discuss integration solutions andprovide demonstrations&lt;/li&gt;&lt;li&gt;Perform in-depth discovery of clients business,competitors and industry to provide best practices and competitiveanalysis.&lt;/li&gt;&lt;li&gt;Gather, evaluate, analyze, and document businessrequirements; translates business requirements into functional andtechnical specifications.&lt;/li&gt;&lt;li&gt;Exercisesjudgment and business acumen in selecting methods and techniques todeliver technical solutions on non-routine and very complex aspects ofapplications and technology installations.&lt;/li&gt;&lt;li&gt;Ensures that all integrations are up and running andtroubleshoot any issues during day-to-day operations&lt;/li&gt;&lt;li&gt;Technicalcoordination with third party service providers for Oracle 
applicationissues and code migrations&lt;/li&gt;&lt;li&gt;Provide technical estimates and Oracledesign recommendations for system enhancements.&lt;/li&gt;&lt;/ul&gt;&lt;ul&gt;&lt;li&gt;The ideal candidate for this role would have a strong backgroundin integration technologies, with experience working with Oracle EBS technologies, Oracle Integration Cloud (OIC) and other related technologiessuch as SOAP/REST, SFTP, XSLT, SQL and cloud integration&lt;/li&gt;&lt;li&gt;Experience with twofull life cycle implementations of Oracle EBS Financial modules with specialemphasis on General Ledger (GL), Accounts Payables (AP) and Fixed Assets (FA)&lt;/li&gt;&lt;li&gt;Experiencein Oracle EBS Reports, Interfaces, Conversions, Extensions, Forms and Workflowdevelopment&lt;/li&gt;&lt;li&gt;Experience using EBS development tools: BI Publisher, strong PL/SQL, SQL Plus, Alerts, EBS APIs, and UNIX Shell Scripting&lt;/li&gt;&lt;li&gt;Experience personalizingOracle Forms and Oracle Application Framework (OAF).&lt;/li&gt;&lt;li&gt;They should have strong problem-solving skills and the abilityto work well in a team environment.&lt;/li&gt;&lt;li&gt; Strong communication skills, both writtenand verbal, are also important in this role as the consultant will beinteracting with clients and other team members regularly.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: 
&lt;/b&gt;Infobeans&lt;br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810057/oracle-fusion-oci-engineer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810057/oracle-fusion-oci-engineer-at-infobeans/</link>
  <title>[Full Time] Oracle Fusion OCI engineer at Infobeans</title>
  <dc:date>Mon, 02 Feb 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/811616/oracle-fusion-oci-engineer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;AnOracle EBS &amp;amp; Cloud Senior Technical Consultant with expertise inintegrations, Oracle Integration Cloud (OIC) and EPM Automate Tool wouldtypically be responsible for the following:&lt;/li&gt;&lt;li&gt;DesigningPAAS solutions.&lt;/li&gt;&lt;li&gt;Designingand implementing integration solutions using Oracle Integration Cloud(OIC) and EPM Automate Tool for Oracle ERP Cloud.&lt;/li&gt;&lt;li&gt;Providingstrong Oracle ERP technical expertise in Oracle GL, AP and FAmodules/interfaces using PL/SQL and BIP reports. &lt;/li&gt;&lt;li&gt;Providingtechnical expertise and guidance on integration best practices, patternsand methodologies&lt;/li&gt;&lt;li&gt;Collaboratingwith other team members to understand requirements and design solutionsthat meet those needs&lt;/li&gt;&lt;li&gt;Troubleshootingand resolving technical issues related to integration solutions&lt;/li&gt;&lt;li&gt;Stayingup-to-date with new features and updates in Oracle EBS, Oracle IntegrationCloud (OIC) and other related technologies&lt;/li&gt;&lt;li&gt;Participatingin client meetings and presentations to discuss integration solutions andprovide demonstrations&lt;/li&gt;&lt;li&gt;Perform in-depth discovery of clients business,competitors and industry to provide best practices and competitiveanalysis.&lt;/li&gt;&lt;li&gt;Gather, evaluate, analyze, and document businessrequirements; translates business requirements into functional andtechnical specifications.&lt;/li&gt;&lt;li&gt;Exercisesjudgment and business acumen in selecting methods and techniques todeliver technical solutions on non-routine and very complex aspects ofapplications and technology installations.&lt;/li&gt;&lt;li&gt;Ensures that all integrations are up and running andtroubleshoot any issues during day-to-day operations&lt;/li&gt;&lt;li&gt;Technicalcoordination with third party service providers for Oracle applicationissues and code 
migrations&lt;/li&gt;&lt;li&gt;Provide technical estimates and Oracledesign recommendations for system enhancements.&lt;/li&gt;&lt;li&gt;The ideal candidate for this role would have a strong backgroundin integration technologies, with experience working with Oracle EBStechnologies, Oracle Integration Cloud (OIC) and other related technologiessuch as SOAP/REST, SFTP, XSLT, SQL and cloud integration. &lt;/li&gt;&lt;li&gt;Experience with twofull life cycle implementations of Oracle EBS Financial modules with specialemphasis on General Ledger (GL), Accounts Payables (AP) and Fixed Assets (FA). &lt;/li&gt;&lt;li&gt;Experiencein Oracle EBS Reports, Interfaces, Conversions, Extensions, Forms and Workflowdevelopment. Experience using EBS development tools: BI Publisher, strongPL/SQL, SQL Plus, Alerts, EBS APIs, and UNIX Shell Scripting. &lt;/li&gt;&lt;li&gt;Experience personalizingOracle Forms and Oracle Application Framework (OAF).&lt;/li&gt;&lt;li&gt;They should have strong problem-solving skills and the abilityto work well in a team environment. 
Strong communication skills, both writtenand verbal, are also important in this role as the consultant will beinteracting with clients and other team members regularly.&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/811616/oracle-fusion-oci-engineer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/811616/oracle-fusion-oci-engineer-at-infobeans/</link>
  <title>[Full Time] Oracle Fusion OCI Engineer at Infobeans</title>
  <dc:date>Fri, 30 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810322/oracle-fusion-erp-a%c2%80%c2%93-security-analyst-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;We are seeking an Oracle FusionCloud ERP (ERP) functional support to help implementing key Financialinitiatives on the Oracle platform due to rapid growth in XXX legal entitiesand ledgers, including the formation of Bank Holding Company and GroupCompanies. &lt;/li&gt;&lt;li&gt;The Oracle Fusion ERPSecurity Analyst role typically involves managing user access, roles,and security configurations within Oracle Cloud ERP systems, ensuring datasecurity, and compliance with relevant policies. &lt;/li&gt;&lt;li&gt;The role often requiresa combination of technical and functional expertise, including experiencewith Oracle Fusion Cloud applications, security concepts like and , andsecurity auditing. Project Management across the full SDLC is a plus forthis position. The scope of the rolesupports financial business users in North America and EMEA locations.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Position Responsibilities/Duties&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Provisioning and managing useraccounts, roles, and permissions based on business requirements. Defining and implementingsecurity policies and access controls for Oracle Cloud applications. Conducting security assessmentsof Oracle Cloud environments and applications to identify vulnerabilities andensure compliance. Ensuring compliance withrelevant security standards, policies, and regulations (e.g., ,NYDFS, etc). Responding to securityincidents, coordinating with other teams to resolve issues, and implementingpreventative measures. Designing and implementingsecurity roles that align with business processes and adhere to leastprivilege principles. &lt;/li&gt;&lt;li&gt;Identifyingand mitigating potential SOD conflicts within the system. 
Creating and maintainingdocumentation for security configurations, policies, and procedures. &lt;/li&gt;&lt;li&gt;Collaboratingwith business users, IT teams, and other stakeholders to address security-relatedneeds. Ensuring that proposedsolutions comply with the companys technology direction. Ensuring compliance withcompanys change and security policies. Working closely with Oracle applicationteam (internal and external) on security design and related applicationconfiguration.&lt;/li&gt;&lt;li&gt;Providegeneral technical support for Oracle Fusion Cloud ERP system and usertraining Late night production supportand weekend implementation work will be requiredEnd user requirements management (20%)Designing, configuring, developing, testing, and improving Oracle Financialsplatform. (50%)Project Management (5%)Production support for ERP issues. (25%)&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Experience/Knowledge Requirements&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Demonstrated hands ontechnical understanding of Oracle Fusion Cloud ERP concepts and generalmodule functionality Ability to clearly describeend-to-end business processes and the key integration points, &lt;/li&gt;&lt;li&gt;workcross-functionally across different business processes within anorganization, and knowledge of challenges faced by implementation teams Strong analytical, written,and verbal communication skills Project Management skillsusing MS Project a plus &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Required Skills/Abilities&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Must have Strong understandingof Oracle Fusion Cloud security concepts, including the Security Console, 
andexperience with relevant tools and technologies. &lt;/li&gt;&lt;li&gt;Severalyears of experience with Oracle Fusion Cloud applications, includingexperience with security configuration, role-based access control (RBAC), andsegregation of duties (SOD). Excellent communication skillswith the ability to drive consensus and absorb and present complex ideas in asuccinct and accurate manner. &lt;/li&gt;&lt;li&gt;Have a team oriented approach Minimum 5 years of experiencein Oracle Fusion Cloud ERP experience is a must Experience in finance andaccounting industry a plus Should have strong Englishcommunication and writing skills Ability to work independentlyand as part of a team Excellent troubleshooting andproblem solving abilities Must be willing to work in anenergetic, fast paced and team-oriented development environment Ability to manage multiplepriorities effectively is a necessity &lt;/li&gt;&lt;li&gt;Minimum 3+ year TOAD/SQLexperience is a must University Degree/Preferably in a field related to computerscience/software engineering Oracle Fusion Cloud certifications a plus&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Skills and Special Requirements (heavy lifting, excessive overtime, etc.)&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;&lt;b&gt;&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Must be willing to work some weekends and late nights to support production, upgrades,changes, and user support via on-call rotation.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;/span&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role 
Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810322/oracle-fusion-erp-a%c2%80%c2%93-security-analyst-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810322/oracle-fusion-erp-a%c2%80%c2%93-security-analyst-at-infobeans/</link>
  <title>[Full Time] Oracle Fusion ERP – Security Analyst at Infobeans</title>
  <dc:date>Fri, 30 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809842/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;PostgreSQL&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NAMinimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration. You will collaborate with various teams to ensure that the data architecture aligns with business objectives and technical specifications, while also addressing any challenges that arise in the data management process. Your role will be pivotal in establishing a robust data framework that supports the organization&apos;&apos;s goals and enhances data accessibility and usability across different platforms. &lt;b&gt;Roles &amp;amp; Responsibilities:-&lt;/b&gt; Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing sessions to enhance team capabilities.- Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;Professional &amp;amp; Technical Skills:-&lt;/b&gt; &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in PostgreSQL.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud-based data storage solutions.- Ability to optimize database performance and scalability. &lt;b&gt;Additional Information:-&lt;/b&gt; The candidate should have minimum 7.5 years of experience in PostgreSQL.- This position is based at our Hyderabad office.- A 15 years full time education is required.Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809842/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809842/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Fri, 30 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809809/gcp-data-architect-at-capco/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; Key Responsibilities &lt;/div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;div&gt; Lead end-to-end GCP data platform implementations, focusing on BigQuery for analytics, Pub/Sub for real-time messaging, and ETL pipelines using Dataflow, Composer, or custom tools.Architect and optimize high-volume data ingestion, transformation, and querying workflows to support business intelligence and ML workloads. &lt;/div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;div&gt; Collaborate with cross-functional stakeholders including product owners, data scientists, and executives to gather requirements, define roadmaps, and deliver value through clear communication and presentations.Mentor junior engineers on GCP services, conduct code reviews, and enforce best practices for security, cost optimization, and scalability.Troubleshoot complex issues in production environments, perform performance tuning, and integrate with hybrid/multi-cloud setups.Drive migration projects from on-premise or other clouds (e.g., AWS/Cloudera) to GCP, ensuring minimal downtime and data integrity. &lt;/div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;div&gt; Required Qualifications10-12 years of total experience in IT/cloud consulting, with at least 5+ years hands-on with GCP (BigQuery, Pub/Sub, Dataflow/ETL).Bachelors or Masters degree in Computer Science, Engineering, or related field.Proven track record leading GCP projects with measurable outcomes (e.g., reduced ETL latency by 40% or scaled Pub/Sub to millions of events/sec).Strong communication skills for stakeholder management, including executive presentations and agile ceremonies.GCP certifications (e.g., Professional Data Engineer, Professional Cloud Architect) preferred. 
&lt;/div&gt; &lt;div&gt; &amp;nbsp; &lt;/div&gt; &lt;div&gt; Preferred SkillsExperience with Dataproc, Vertex AI, or integration with tools like Snowflake / Databricks.Familiarity with CI/CD (Cloud Build), IAM, and monitoring (Cloud Monitoring/Logging).Knowledge of data governance, compliance (GDPR), and cost management in GCP. &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capco&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809809/gcp-data-architect-at-capco/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809809/gcp-data-architect-at-capco/</link>
  <title>[Full Time] GCP Data Architect at Capco</title>
  <dc:date>Fri, 30 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810210/database-administrator-at-sunquest-information/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Purpose. &lt;/li&gt; &lt;li&gt; In this role you will get the opportunity to work with Clinisys which is a global leader in healthcare information technology. &lt;/li&gt; &lt;li&gt; As a part of the Solution Adoption team, you will act as a liaison between the business and its customers and will be responsible to deliver impeccable service by providing high quality software solutions. &lt;/li&gt; &lt;li&gt; As an Oracle DBA you will perform essential database functions from development through production including but not limited to database installations, upgrades, troubleshooting, and conversions for both client based and cloud hosted products. &lt;/li&gt; &lt;li&gt; Additionally, in this role you will be accountable for documentation for all database functions. &lt;/li&gt; &lt;li&gt; Essential Functions/ Job Responsibilities. &lt;/li&gt; &lt;li&gt; Install Oracle database patches and troubleshoot various issues related to database patching. &lt;/li&gt; &lt;li&gt; Perform production database health check. &lt;/li&gt; &lt;li&gt; Detect potential issues that may impact production database operation and resolve the issues. &lt;/li&gt; &lt;li&gt; Performs database support for both production and development environments, including the installation of databases, back-ups, replication, restores, patches, and upgrades. &lt;/li&gt; &lt;li&gt; Works closely with product development teams to create and manage product databases, solve problems, and optimize performance. &lt;/li&gt; &lt;li&gt; Provide product database support for client installations and knowledge of applying security patches on Oracle Databases. &lt;/li&gt; &lt;li&gt; Ability to configure replication as a part of disaster recovery process. &lt;/li&gt; &lt;li&gt; Identify and resolve database issues that impact application performance. 
&lt;/li&gt; &lt;li&gt; Follows appropriate sign-off and escalation procedures for database changes and version upgrades. &lt;/li&gt; &lt;li&gt; Document database installations, replications, upgrades, and conversions. &lt;/li&gt; &lt;li&gt; Stay ahead of the latest database versions and features. &lt;/li&gt; &lt;li&gt; Qualify latest database versions for application use and recommend and implement emerging database technologies. &lt;/li&gt; &lt;li&gt; Craft and deliver training materials as assigned. &lt;/li&gt; &lt;li&gt; Prepare reports or correspondence concerning project specifications, activities, or status. &lt;/li&gt; &lt;li&gt; All other duties and responsibilities as assigned. &lt;/li&gt; &lt;li&gt; Strong command of Oracle, Oracle server tools and Oracle Data Guard. &lt;/li&gt; &lt;li&gt; Advanced knowledge of database security, backup and recovery, performance monitoring and tuning standards. &lt;/li&gt; &lt;li&gt; Understanding of relational and dimensional data modelling. &lt;/li&gt; &lt;li&gt; Strong mathematical and statistical knowledge. &lt;/li&gt; &lt;li&gt; Impeccable attention to detail. &lt;/li&gt; &lt;li&gt; Develop processes for optimizing database security. &lt;/li&gt; &lt;li&gt; Create and manage database reports, visualizations, and dashboards. &lt;/li&gt; &lt;li&gt; Create automation for repeating database tasks. &lt;/li&gt; &lt;li&gt; Be available for on-call support as needed. &lt;/li&gt; &lt;li&gt; Skills Needed To Be Successful. &lt;/li&gt; &lt;li&gt; Deep knowledge of Oracle servers/databases and Oracle database development and Oracle Data Guard. &lt;/li&gt; &lt;li&gt; Deep knowledge in both OLTP and OLAP database designs and administration. &lt;/li&gt; &lt;li&gt; Proven System Administration skills. &lt;/li&gt; &lt;li&gt; Innovative thinker with a commitment to improve processes and methodologies. &lt;/li&gt; &lt;li&gt; Strong written and verbal communications skills. 
&lt;/li&gt; &lt;li&gt; Excellent client management &amp;amp; client service skills. &lt;/li&gt; &lt;li&gt; Create/Maintain Oracle Database Projects for development. &lt;/li&gt; &lt;li&gt; Knowledge and experience preferred with AI-driven development and automation tools such as GitHub Copilot, Copilot Studio, and related platforms to optimize database administration tasks, implement intelligent query tuning, and enhance operational efficiency. &lt;/li&gt; &lt;li&gt; Demonstrated understanding of AI concepts including Model Context Protocol (MCP), Prompt Engineering, and integration of AI-assisted workflows into DevOps and CI/CD pipelines. &lt;/li&gt; &lt;li&gt; Required Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Bachelor&apos;s degree in business or computer science, or equivalent education and work experience combined. &lt;/li&gt; &lt;li&gt; Minimum 3 to 5 years of Oracle database administration and Oracle Data Guard experience. &lt;/li&gt; &lt;li&gt; Prior exposure to software development processes and/or methodologies. &lt;/li&gt; &lt;li&gt; Familiarity with on premise and cloud-based implementations. &lt;/li&gt; &lt;li&gt; Preferred Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Database certification(s) preferred. &lt;/li&gt; &lt;li&gt; Prior experience with server class hardware, operating systems, and virtualization. &lt;/li&gt; &lt;li&gt; Supervisory Responsibilities. &lt;/li&gt; &lt;li&gt; None. 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sunquest Information&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810210/database-administrator-at-sunquest-information/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810210/database-administrator-at-sunquest-information/</link>
  <title>[Full Time] Database Administrator at Sunquest Information</title>
  <dc:date>Wed, 28 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/810813/architect-atc-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;li&gt;&lt;strong&gt;Key Responsibilities&lt;/strong&gt;&lt;/li&gt;&lt;/div&gt;&lt;p&gt;&lt;/p&gt;&lt;div&gt;&lt;li&gt;Banking-Specific Data Model Design:?Design and create conceptual, logical, and physical data models for retail banking systems, including core banking platforms, customer relationship management (CRM), anti-money laundering (AML), loan origination, and credit card processing systems.&lt;/li&gt;&lt;li&gt;Regulatory &amp;amp; Compliance Modeling:?Ensure all data models adhere to stringent banking regulations, such as?Basel III,?BCBS 239,?GDPR, and local financial compliance laws, incorporating necessary fields for risk reporting and auditing.&lt;/li&gt;&lt;li&gt;Business Requirements Analysis:?Collaborate with product teams, risk management, finance departments, and compliance officers to gather and analyze data requirements specific to retail banking operations (e.g., customer accounts, transactions, loan products, mortgage data).&lt;/li&gt;&lt;li&gt;Metadata Management &amp;amp; Data Lineage:?Utilize erwin DM and the erwin Mart to manage the repository of banking data assets. 
Develop and maintain robust data dictionaries and detailed data lineage from source systems (like teller systems or online banking apps) to target analytics platforms.&lt;/li&gt;&lt;li&gt;Database Implementation &amp;amp; Optimization:?Work with DBAs and data engineers to implement physical models across various platforms (e.g., Oracle, SQL Server, Snowflake), ensuring high performance and data integrity for high-volume transaction processing systems.&lt;/li&gt;&lt;li&gt;Forward &amp;amp; Reverse Engineering:?Employ erwin??s capabilities to reverse engineer legacy banking systems to ensure proper documentation, and forward engineer new systems to replace outdated structures.&lt;/li&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/810813/architect-atc-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/810813/architect-atc-at-virtusa/</link>
  <title>[Full Time] Architect (ATC) at Virtusa</title>
  <dc:date>Wed, 28 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809825/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt; &lt;b&gt;About The Role &lt;/b&gt; &lt;b&gt; &lt;br&gt;Project Role :&lt;/b&gt;Data Architect &lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;b&gt;Must have skills :&lt;/b&gt;Reltio &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;br&gt; &lt;b&gt;Summary&lt;/b&gt;:As a Data Architect/MDM Architect is a senior technology leader responsible for designing, building, and overseeing the enterprise&apos;s Master Data Management solution. This role is pivotal in establishing a &quot;single source of truth&quot; for critical data domains such as Customer, Product, Vendor, and Employee.You will be the chief technical authority on MDM, defining the end-to-end architecture, data models, governance processes, and integration patterns. You will collaborate closely with business stakeholders, data stewards, and IT teams to ensure the MDM solution aligns with strategic business objectives and drives data quality, consistency, and accessibility across the organization. 
Key Responsibilities Strategy &amp;amp; Architecture:Develop and own the enterprise MDM strategy, solution architecture, and implementation roadmap.Design scalable, high-performance MDM solutions (on-premise, cloud, or hybrid) that encompass data modeling, data quality, data governance, and data integration.Define the canonical data models, match/merge rules, survivorship logic, and data stewardship workflows for all master data domains.Evaluate and help DEV team implement Reltio MDM - and best practices.Implementation &amp;amp; Integration:Lead the technical design and implementation of the Reltio MDM platform.Architect and design real-time (API) and batch integration patterns to synchronize master data between the MDM hub and critical enterprise systems (e.g., ERP, CRM, data warehouse).Oversee the configuration of the Reltio MDM solution to meet business requirements.Data Governance &amp;amp; Quality:Collaborate with the Data Governance Office and business data stewards to define and enforce data quality rules, standards, and policies within the MDM solution.Design data quality dashboards and reports to monitor the health of master data.Establish and manage the technical processes for data stewardship, including issue resolution and change management.Leadership &amp;amp; Collaboration:Act as the primary technical liaison between business stakeholders, data governance teams, and development teams.Translate complex business requirements into scalable technical specifications.Mentor and provide technical guidance to ETL Integrations data engineers, developers, and Reltio MDM analysts.Champion the value of master data management and data governance across the organization. 
Skills and &lt;br&gt;Qualifications Required &lt;br&gt;Qualifications Experience:8-12+ years of experience in data management, data architecture, or enterprise architecture.5+ years of hands-on experience designing and implementing large-scale MDM solutions.Technical &lt;br&gt; &lt;b&gt; &lt;br&gt;&lt;/b&gt; &lt;b&gt;Skills:&lt;/b&gt; &lt;li&gt;Expert-level knowledge of at least one major MDM Reltio.Deep understanding of MDM concepts:data modeling, matching, merging, hierarchy management, data quality, and stewardship.Strong experience with data integration technologies and patterns (ETL/ELT, APIs, message queues, web services).Proficiency in SQL and data modeling (conceptual, logical, physical).Experience with one or more major cloud platforms (AWS, Azure, or GCP) and their data services.Soft &lt;br&gt; &lt;b&gt; &lt;br&gt;&lt;/b&gt; &lt;b&gt;Skills:&lt;/b&gt; &lt;/li&gt; &lt;li&gt;Excellent communication and stakeholder management:Ability to explain complex technical concepts to non-technical audiences.Strong leadership and mentoring skills.Strategic thinker with strong analytical and problem-solving abilities Preferred (Nice-to-Have) &lt;br&gt;Qualifications Experience with multiple MDM platforms.Hands-on experience with data engineering tools (e.g., Python, Spark, Databricks).Knowledge of data governance frameworks. Familiarity with data catalog and metadata management tools (e.g., Collibra, Alation).Professional certifications (e.g., TOGAF, DAMA, cloud provider certifications, or tool-specific MDM certifications).Experience in a specific industry (e.g., Finance, Healthcare, Retail) and its related data domains (e.g., Patient, Financial Instrument).Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Reltio.&lt;/li&gt; &lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt; &lt;li&gt;A 15 years full time education is required. 
&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809825/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809825/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Wed, 28 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808296/support-engineer-iii-just-walk-out-tech-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Description. &lt;/li&gt; &lt;li&gt; As part of the AWS Applied AI Solutions organization, we have a vision to provide business applications, leveraging Amazons unique experience and expertise, that are used by millions of companies worldwide to manage day-to-day operations. &lt;/li&gt; &lt;li&gt; We will accomplish this by accelerating our customers businesses through delivery of intuitive and differentiated technology solutions that solve enduring business challenges. &lt;/li&gt; &lt;li&gt; We blend vision with curiosity and Amazons real-world experience to build opinionated, turnkey solutions. &lt;/li&gt; &lt;li&gt; Where customers prefer to buy over build, we become their trusted partner with solutions that are no-brainers to buy and easy to use. &lt;/li&gt; &lt;li&gt; Join us on our mission to revolutionize the way the world shops. &lt;/li&gt; &lt;li&gt; We are the Amazon Physical Stores Technical Operations team, responsible for launching and operating the commerce software services which run stores with its Just Walk Out technology. &lt;/li&gt; &lt;li&gt; Our approach to problems is entrepreneurial. &lt;/li&gt; &lt;li&gt; You will be challenged to invent, create and solve challenging problems as well as wear many hats and collaborate in an environment thats more startup than big company. &lt;/li&gt; &lt;li&gt; You will need a strong ability to troubleshoot hardware, software, and network issues. &lt;/li&gt; &lt;li&gt; As a Support Engineer (Technical Operations Center Engineer), you seek resolution to problems and mitigate risk, always ensuring a Customer Obsessed experience has occurred. &lt;/li&gt; &lt;li&gt; You will be working on services with a direct impact on the customer experience. 
&lt;/li&gt; &lt;li&gt; If you are excited about the opportunity to learn and work on distributed systems, enjoy trouble shooting and solving complex problems, consider the opportunities to work with Amazon Physical Stores. &lt;/li&gt; &lt;li&gt; You will help solve a variety of challenges and offer your expertise in growing the knowledge of your peers via team collaboration. &lt;/li&gt; &lt;li&gt; You will be counted on to identify areas of improvement and drive projects to implement them. &lt;/li&gt; &lt;li&gt; We consistently whiteboard so be comfortable writing and supporting your ideas on the team board. &lt;/li&gt; &lt;li&gt; You will play an active role in defining the support processes for technologies in partnership with other technology leaders within and possibly outside the team. &lt;/li&gt; &lt;li&gt; You should be comfortable with a level of ambiguity thats higher than most projects and relish the idea of solving big challenges. &lt;/li&gt; &lt;li&gt; You will also mentor other engineers in your area of expertise. &lt;/li&gt; &lt;li&gt; Along the way, we guarantee that youll work hard, have fun and impact many customers!. &lt;/li&gt; &lt;li&gt; This role requires the flexibility to work 5 days a week (occasionally on weekends) on a rotational basis. &lt;/li&gt; &lt;li&gt; AWS Support is 24x7x365 operations and work timings for this role is in India night time ie 10 PM to 6 AM IST or 1 PM to 10 PM IST. &lt;/li&gt; &lt;li&gt; You are expected to work in night shifts hours based on business requirements. &lt;/li&gt; &lt;li&gt; About The Team. &lt;/li&gt; &lt;li&gt; Diverse Experiences. &lt;/li&gt; &lt;li&gt; Amazon values diverse experiences. &lt;/li&gt; &lt;li&gt; Even if you do not meet all of the preferred qualifications and skills listed in the job description, we encourage candidates to apply. 
&lt;/li&gt; &lt;li&gt; If your career is just starting, hasnt followed a traditional path, or includes alternative experiences, dont let it stop you from applying. &lt;/li&gt; &lt;li&gt; Why AWS. &lt;/li&gt; &lt;li&gt; Amazon Web Services (AWS) is the worlds most comprehensive and broadly adopted cloud platform. &lt;/li&gt; &lt;li&gt; We pioneered cloud computing and never stopped innovating thats why customers from the most successful startups to Global 500 companies trust our robust suite of products and services to power their businesses. &lt;/li&gt; &lt;li&gt; Work/Life Balance. &lt;/li&gt; &lt;li&gt; We value work-life harmony. &lt;/li&gt; &lt;li&gt; Achieving success at work should never come at the expense of sacrifices at home, which is why we strive for flexibility as part of our working culture. &lt;/li&gt; &lt;li&gt; When we feel supported in the workplace and at home, theres nothing we cant achieve. &lt;/li&gt; &lt;li&gt; Inclusive Team Culture. &lt;/li&gt; &lt;li&gt; AWS values curiosity and connection. &lt;/li&gt; &lt;li&gt; Our employee-led and company-sponsored affinity groups promote inclusion and empower our people to take pride in what makes us unique. &lt;/li&gt; &lt;li&gt; Our inclusion events foster stronger, more collaborative teams. &lt;/li&gt; &lt;li&gt; Our continual innovation is fueled by the bold ideas, fresh perspectives, and passionate voices our teams bring to everything we do. &lt;/li&gt; &lt;li&gt; Mentorship and Career Growth. &lt;/li&gt; &lt;li&gt; Were continuously raising our performance bar as we strive to become Earths Best Employer. &lt;/li&gt; &lt;li&gt; Thats why youll find endless knowledge-sharing, mentorship and other career-advancing resources here to help you develop into a better-rounded professional. &lt;/li&gt; &lt;li&gt; Basic Qualifications. &lt;/li&gt; &lt;li&gt; 2+ years of software development, or 2+ years of technical support experience. &lt;/li&gt; &lt;li&gt; Experience scripting in modern program languages. 
&lt;/li&gt; &lt;li&gt; Experience troubleshooting and debugging technical systems. &lt;/li&gt; &lt;li&gt; Preferred Qualifications. &lt;/li&gt; &lt;li&gt; Knowledge of web services, distributed systems, and web application development. &lt;/li&gt; &lt;li&gt; Experience troubleshooting &amp;amp; maintaining hardware &amp;amp; software RAID. &lt;/li&gt; &lt;li&gt; Experience with REST web services, XML, JSON. &lt;/li&gt; &lt;li&gt; Our inclusive culture empowers Amazonians to deliver the best results for our customers. &lt;/li&gt; &lt;li&gt; If you have a disability and need a workplace accommodation or adjustment during the application and hiring process, including support for the interview or onboarding process, please for more information. &lt;/li&gt; &lt;li&gt; If the country/region you're applying in isn't listed, please contact your Recruiting Partner. &lt;/li&gt; &lt;li&gt; Company Amazon Dev Center India Hyderabad. &lt;/li&gt; &lt;li&gt; Job ID: A3160217. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808296/support-engineer-iii-just-walk-out-tech-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808296/support-engineer-iii-just-walk-out-tech-at-amazon/</link>
  <title>[Full Time] Support Engineer III, Just Walk Out Tech at Amazon</title>
  <dc:date>Fri, 23 Jan 2026 14:17:04 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808725/senior-software-engineer-at-walmart/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Position Summary...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;What youll do...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;b&gt;About Team&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Walmart s Enterprise Business Services (EBS) is a powerhouse of several exceptional teams delivering world-class technology solutions and services making a profound impact at every level of Walmart. &lt;/p&gt; &lt;p&gt;As a key part of Walmart Global Tech, our teams set the bar for operational excellence and leverage emerging technology to support millions of customers, associates, and stakeholders worldwide. Each time an associate turns on their laptop, a customer makes a purchase, a new supplier is onboarded, the company closes the books, physical and legal risk is avoided, and when we pay our associates consistently and accurately, that is EBS. Joining EBS means embarking on a journey of limitless growth, relentless innovation, and the chance to set new industry standards that shape the future of Walmart. 
&lt;/p&gt; &lt;p&gt; &lt;b&gt;What you will do&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;System Administration &amp;amp; Operations&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Lead administration of SAP NetWeaver (ECC) and SAP S/4HANA systems.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Lead ECC, BW, MDG, SLT and S/4HANA technical administration and landscape strategy, including upgrades, technical transformations, integrations, and special requirements.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Provide hands-on leadership for SAP HANA administration, performance tuning, and troubleshooting.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Provide HANA technical governance including capacity planning, performance trend analysis, and issue resolution guidance.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Govern SAP transport management using STMS and Solution Manager ChaRM.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Own and design enterprise monitoring and operations architecture using:&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;SAP Focused Run for centralized, large-scale monitoring&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;SAP Solution Manager for ChaRM, monitoring, and transition scenarios&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;SAP Cloud ALM for cloud-centric landscapes&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Oversee daily SAP Basis operations, including monitoring, reporting, housekeeping, and capacity planning.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Plan and execute SAP kernel upgrades, support package stacks, and patching activities with minimal business disruption.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Security, Compliance &amp;amp; Vulnerability Management&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Define and govern vulnerability management strategy, including:&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Risk-based assessment and prioritization of SAP Security Notes&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Kernel, 
support pack, and OS patch governance&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Integration of vulnerability remediation with change and release management&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Architect SAP security fundamentals, including:&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;SSL and certificate lifecycle management strategy&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Authentication patterns using SAML and OAuth&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Act as final escalation point for complex, cross-layer security and production incidents.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Troubleshooting &amp;amp; Integration&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Drive complex troubleshooting across SAP Fiori, SAP GUI, Enterprise Portal, printers, and interface components.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Lead integration support involving PI/PO, BW, Portal, ADS, SSO, and SAML 2.0.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Own Root Cause Analysis (RCA) for major incidents and implement preventive actions.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Provide architectural oversight for SAP BODS landscapes, including stability, performance, and patch strategy.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Cloud, Automation &amp;amp; Optimization&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Identify and drive automation, performance optimization, and cost optimization initiatives across SAP systems and infrastructure.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Provide technical leadership for SAP workloads on cloud platforms (Azure / GCP).&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Define hybrid SAP platform standards for high availability, disaster recovery, backup, and cloud cost optimization.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Guide teams on infrastructure automation and DevOps practices using Terraform, Ansible, scripting (Shell/Python), and CI/CD 
pipelines.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Drive automation-first engineering practices, including:&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Patch orchestration, validation automation, and compliance reporting&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Rule-based or AI-assisted automation for alert triage and self-healing&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Operating Systems &amp;amp; Infrastructure&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Lead SAP operations in Linux and Windows environments.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Collaborate with infrastructure teams on networking, load balancers, firewalls, storage, and OS-level administration.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Stakeholder &amp;amp; Team Leadership&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Mentor and guide SAP Basis administrators, ensuring skill development and operational excellence.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Define operational, security, and compliance standards for SAP platforms.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Collaborate closely with application, infrastructure, security, and business teams to drive issues to closure.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Communicate effectively with senior stakeholders, providing clear status updates, risk assessments, and improvement plans.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Maintain a customer-centric approach focused on delivering reliable and measurable outcomes.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;What you will bring&lt;/b&gt; &lt;/p&gt; &lt;p&gt; &lt;b&gt;Core Experience &amp;amp; Expertise&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;10 12+ years of SAP Basis and SAP infrastructure experience with deep ECC and S/4HANA ownership.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Expert knowledge of SAP ECC, S/4HANA, BW/4HANA, PI/PO, CAR, Solution Manager (ChaRM), SAP LaMa, SAP BTP, SAP Cloud Connector, SAP 
Enterprise Portal, and SAP Fiori.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Proven leadership of complex SAP upgrades, migrations, or landscape redesigns.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Working (non-DBA) knowledge of SAP HANA.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Excellent troubleshooting skills across SAP, OS, database, network, and integration layers.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Cloud, Monitoring &amp;amp; Automation&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Experience running SAP at scale on Azure and/or GCP.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong hands-on expertise with SAP Focused Run.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Clear understanding of Solution Manager vs Focused Run vs Cloud ALM positioning.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Proficiency in Terraform, Ansible, scripting (Shell/Python), and CI/CD pipelines.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong scripting and automation architecture experience.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Security &amp;amp; Integration&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Strong knowledge of SAP Security Notes, SSL, SAML, OAuth, and SAP security architecture.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience governing SAP BODS platforms.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience with infrastructure components: networking, load balancers, firewalls, storage systems, and OS-level administration (Linux/Unix).&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;Preferred / Nice-to-have&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Experience supporting or leading S/4HANA conversions.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Exposure to Edge Integration Cell or hybrid integration runtimes.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;SAP or cloud architect certifications.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience with AI/ML-driven operations or observability platforms.&lt;/p&gt; 
&lt;/li&gt; &lt;li&gt; &lt;p&gt;Experience in high-availability, retail-scale or enterprise-scale environments&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;About Walmart Global Tech&lt;/b&gt; &lt;br&gt;Imagine working in an environment where one line of code can make life easier for hundreds of millions of people. That's what we do at Walmart Global Tech. We're a team of software engineers, data scientists, cybersecurity experts and service professionals within the world's leading retailer who make an epic impact and are at the forefront of the next retail disruption. People are why we innovate, and people power our innovations. We are people-led and tech-empowered.&lt;/p&gt; &lt;p&gt;We train our team in the skillsets of the future and bring in experts like you to help us grow. We have roles for those chasing their first opportunity as well as those looking for the opportunity that will define their career. Here, you can kickstart a great career in tech, gain new skills and experience for virtually every industry, or leverage your expertise to innovate at scale, impact millions and reimagine the future of retail.&lt;/p&gt; &lt;p&gt;Walmart's culture sets us apart, and we know being together helps us innovate, learn and grow great careers. This role is based in our [Bangalore/Chennai] office for daily work, with the flexibility for associates to manage their personal lives.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Benefits&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Beyond our great compensation package, you can receive incentive awards for your performance. Other great perks include a host of best-in-class benefits — maternity and parental leave, PTO, health benefits, and much more.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Belonging&lt;/b&gt; &lt;/p&gt; &lt;p&gt;We aim to create a culture where every associate feels valued for who they are, rooted in respect for the individual. 
Our goal is to foster a sense of belonging, to create opportunities for all our associates, customers and suppliers, and to be a Walmart for everyone.&lt;/p&gt; &lt;p&gt;At Walmart, our vision is &quot;everyone included.&quot; By fostering a workplace culture where everyone is and feels included, everyone wins. Our associates and customers reflect the makeup of all 19 countries where we operate. By making Walmart a welcoming place where all people feel like they belong, we re able to engage associates, strengthen our business, improve our ability to serve customers, and support the communities where we operate.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Equal Opportunity Employer&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Walmart, Inc., is an Equal Opportunities Employer By Choice. We believe we are best equipped to help our associates, customers and the communities we serve live better when we really know them. That means understanding, respecting and valuing unique styles, experiences, identities, ideas and opinions while being inclusive of all people.&lt;/p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Minimum Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;span&gt; &lt;i&gt;Outlined below are the required minimum qualifications for this position. If none are listed, there are no minimum qualifications. &lt;/i&gt; &lt;/span&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Option 1: Bachelors degree in computer science, computer engineering, computer information systems, software engineering, or related area and 3 years experience in software engineering or related area.&lt;br&gt;Option 2: 5 years experience in software engineering or related area. 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Preferred Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;i&gt;Outlined below are the optional preferred qualifications for this position. If none are listed, there are no preferred qualifications. 
&lt;/i&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Master s degree in computer science, information technology, engineering, information systems, cybersecurity, or related area and 1 year s experience leading information security or cybersecurity projects, We value candidates with a background in creating inclusive digital experiences, demonstrating knowledge in implementing Web Content Accessibility Guidelines (WCAG) 2.2 AA standards, assistive technologies, and integrating digital accessibility seamlessly. The ideal candidate would have knowledge of accessibility best practices and join us as we continue to create accessible products and services following Walmart s accessibility standards and guidelines for supporting an inclusive culture. 
Information Technology - CISCO Certification - Certification &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Primary Location...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; Pardhanani Wilshire Ii, Cessna Business Park, Kadubeesanahalli Village, Varthur Hobli , India&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Walmart&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Chennai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/808725/senior-software-engineer-at-walmart/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808725/senior-software-engineer-at-walmart/</link>
  <title>[Full Time] Senior, Software Engineer at Walmart</title>
  <dc:date>Fri, 23 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808808/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Analyze, design, code and test multiple components of application code across one or more clients. Perform maintenance, enhancements and/or development work. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Oracle Procedural Language Extensions to SQL (PLSQL)&lt;br&gt; &lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Software Development Engineer, your typical day will involve analyzing, designing, coding, and testing various components of application code for multiple clients. You will engage in maintenance and enhancement tasks, ensuring that the applications function optimally and meet client requirements. Collaboration with team members will be essential as you contribute to the development process and address any challenges that arise. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;- Expected to perform independently and become an SME.- Required active participation/contribution in team discussions.- Contribute in providing solutions to work related problems.- Collaborate with cross-functional teams to gather requirements and provide technical insights.- Document code changes and maintain clear communication with stakeholders regarding project status. &lt;b&gt;&lt;br&gt;Professional &amp;amp; Technical Skills:-&lt;/b&gt; &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Oracle Procedural Language Extensions to SQL (PLSQL).- Strong understanding of database design and optimization techniques.- Experience with application development lifecycle methodologies.- Familiarity with version control systems such as Git.- Ability to troubleshoot and resolve software defects efficiently. 
&lt;b&gt;Additional Information:-&lt;/b&gt; The candidate should have minimum 3 years of experience in Oracle Procedural Language Extensions to SQL (PLSQL).- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;br&gt; Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808808/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808808/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Fri, 23 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808970/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;AI &amp;amp; Data Solution Architecture&lt;br&gt; &lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly across systems, while also addressing any challenges that arise in the data architecture process. Your role will be pivotal in shaping the data landscape of the organization, enabling effective data management and utilization. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Expected to provide solutions to problems that apply across multiple teams.- Facilitate knowledge sharing sessions to enhance team capabilities.- Develop and maintain documentation related to data architecture and design. 
&lt;b&gt;&lt;br&gt;Professional &amp;amp; Technical Skills:-&lt;/b&gt; &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Data &amp;amp; AI Solution Architecture.- Strong understanding of data modeling techniques and best practices.- Experience with cloud-based data solutions and architectures.- Familiarity with data integration tools and methodologies.- Ability to design scalable and efficient data storage solutions. &lt;b&gt;Additional Information:-&lt;/b&gt; The candidate should have minimum 12 years of experience in Data &amp;amp; AI Solution Architecture.- This position is based at our Bengaluru office.- A 15 years full time education is required.&lt;br&gt; Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808970/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808970/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>Fri, 23 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809022/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Google Cloud Platform Architecture&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will design and deliver end-to-end data architecture solutions for platforms, products, or engagements on Google Cloud. You will define architectures that meet performance, scalability, security, and compliance requirements while ensuring data integrity and accessibility. 
You will be responsible for the successful implementation of data solutions that align with business strategy.Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be a Subject Matter Expert (SME) with deep expertise in Google Cloud data architecture.&lt;/li&gt;&lt;li&gt;Provide strategic guidance, influencing architectural decisions across multiple teams.&lt;/li&gt;&lt;li&gt;Collaborate with stakeholders to define data strategies, roadmaps, and governance models.&lt;/li&gt;&lt;li&gt;Design enterprise-grade data architectures supporting analytics, AI/ML, and operational workloads.&lt;/li&gt;&lt;li&gt;Ensure solutions adhere to best practices for security, performance, and cost optimization.&lt;/li&gt;&lt;li&gt;Lead the implementation of data architecture frameworks and reference models.&lt;/li&gt;&lt;li&gt;Guide teams on data migration, integration, and modernization initiatives.Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Expertise in Google Cloud data services (BigQuery, Cloud Storage, Pub/Sub, Dataflow, Dataproc, etc.).&lt;/li&gt;&lt;li&gt;Strong knowledge of data architecture principles, data modeling, and data governance.&lt;/li&gt;&lt;li&gt;Proven experience in designing scalable, high-performance, and secure cloud-based data platforms.&lt;/li&gt;&lt;li&gt;Hands-on experience with data ingestion, ETL/ELT, streaming, and batch processing.&lt;/li&gt;&lt;li&gt;Familiarity with compliance frameworks and data security best practices in cloud environments.Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have a minimum of 16 years of experience in data architecture, with a strong focus on Google Cloud.&lt;/li&gt;&lt;li&gt;This position is based Pan India&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; 
Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809022/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809022/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808863/sr-software-engineer-at-encora/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;/div&gt; &lt;div&gt; We are seeking a skilled &lt;strong&gt; Software Developer &lt;/strong&gt; to design, develop, and maintain applications that meet business requirements. The ideal candidate will have strong expertise in &lt;strong&gt; Python &lt;/strong&gt; and &lt;strong&gt; SQL &lt;/strong&gt; , with exposure to modern cloud and data technologies. &lt;/div&gt; &lt;p&gt; &lt;strong&gt; Key Responsibilities: &lt;/strong&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; Design, develop, and maintain scalable applications using &lt;strong&gt; Python &lt;/strong&gt; . &lt;/li&gt; &lt;li&gt; Write efficient SQL queries for data manipulation and reporting. &lt;/li&gt; &lt;li&gt; Collaborate with cross-functional teams to gather requirements and deliver solutions. &lt;/li&gt; &lt;li&gt; Optimize application performance and troubleshoot issues. &lt;/li&gt; &lt;li&gt; Implement best practices in coding, testing, and deployment. &lt;/li&gt; &lt;li&gt; Work with version control systems (eg, Git) and CI/CD pipelines. &lt;/li&gt; &lt;/ul&gt; &lt;strong&gt; Mandatory Skills: &lt;/strong&gt; &lt;ul&gt; &lt;li&gt; &lt;strong&gt; Python &lt;/strong&gt; programming (including frameworks like Flask/Django preferred). &lt;/li&gt; &lt;li&gt; Strong knowledge of &lt;strong&gt; SQL &lt;/strong&gt; (query optimization, stored procedures). &lt;/li&gt; &lt;/ul&gt; &lt;strong&gt; Desired Skills: &lt;/strong&gt; &lt;ul&gt; &lt;li&gt; Experience with &lt;strong&gt; Snowflake &lt;/strong&gt; (data warehousing concepts, query performance tuning). &lt;/li&gt; &lt;li&gt; Familiarity with &lt;strong&gt; Microsoft Azure &lt;/strong&gt; services (Data Factory, Azure SQL, etc). &lt;/li&gt; &lt;li&gt; Knowledge of REST APIs and integration techniques. &lt;/li&gt; &lt;li&gt; Understanding of Agile methodologies. 
&lt;/li&gt; &lt;/ul&gt; &lt;strong&gt; Qualifications: &lt;/strong&gt; &lt;ul&gt; &lt;li&gt; Bachelor s degree in Computer Science, Engineering, or related field. &lt;/li&gt; &lt;li&gt; 2 5 years of experience in software development &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Encora&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808863/sr-software-engineer-at-encora/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808863/sr-software-engineer-at-encora/</link>
  <title>[Full Time] Sr Software Engineer at Encora</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809469/senior-technical-support-engineer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;b&gt;What will your role look like&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Act as a Senior Technical Support Engineer providing expert-level technical assistance to customers via phone and email.&lt;/li&gt;&lt;li&gt;Troubleshoot and resolve complex issues across software applications, APIs, integrations, infrastructure, cloud, and hardware environments.&lt;/li&gt;&lt;li&gt;Reproduce issues, perform root cause analysis, and collaborate closely with Engineering and Product teams on bug fixes and feature enhancements.&lt;/li&gt;&lt;li&gt;Lead or participate in incident management and on-call rotations for critical production issues.&lt;/li&gt;&lt;li&gt;Serve as a customer advocate by triaging issues to internal teams and third-party vendors, ensuring resolution within defined SLOs.&lt;/li&gt;&lt;li&gt;Support resellers by staying current with platform features and acting as a subject matter expert for diagnostics and issue resolution.&lt;/li&gt;&lt;li&gt;Identify recurring support trends and contribute to long-term product and process improvements.&lt;/li&gt;&lt;li&gt;Maintain accurate case records and status updates in ticketing systems.&lt;/li&gt;&lt;li&gt;Create and maintain technical documentation, runbooks, and FAQs.&lt;/li&gt;&lt;li&gt;Work closely with cross-functional teams and vendors to ensure timely issue resolution.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Why you will love this role&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Opportunity to work on complex, real-world technical challenges across modern SaaS, cloud, and API-driven platforms.&lt;/li&gt;&lt;li&gt;High-impact role where your expertise directly influences customer success and product improvement.&lt;/li&gt;&lt;li&gt;Strong collaboration with Engineering, Product, and Customer-facing teams.&lt;/li&gt;&lt;li&gt;Continuous learning environment with exposure to new technologies and evolving 
systems.&lt;/li&gt;&lt;li&gt;A fast-paced, customer-first culture that values ownership, accountability, and technical excellence.&lt;/li&gt;&lt;li&gt;Ability to make a meaningful difference by improving both customer experience and internal processes.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;We would like you to bring along&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;4-7+ years of experience in technical support, solutions engineering, or a similar customer-facing technical role.&lt;/li&gt;&lt;li&gt;Strong hands-on experience with SaaS platforms, APIs, databases, and scripting (Python, Shell).&lt;/li&gt;&lt;li&gt;Experience working with cloud services such as AWS and Azure.&lt;/li&gt;&lt;li&gt;Solid understanding of web technologies, integrations, and system architecture.&lt;/li&gt;&lt;li&gt;Experience with ticketing systems (e.g., Zendesk, Jira) and knowledge base tools.&lt;/li&gt;&lt;li&gt;Strong troubleshooting, analytical, and problem-solving skills with the ability to communicate clearly under pressure.&lt;/li&gt;&lt;li&gt;Excellent verbal and written communication skills with a strong customer-first mindset.&lt;/li&gt;&lt;li&gt;Ability to prioritize effectively, manage customer expectations, and execute efficiently in a fast-paced environment.&lt;/li&gt;&lt;li&gt;Willingness to work flexible hours, including some weekends, to support a 247 business.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;b&gt;Good-to-have skills&lt;/b&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience working with web servers and databases such as Apache, IIS, MySQL, MSSQL, and PostgreSQL.&lt;/li&gt;&lt;li&gt;Knowledge of application protocols including DNS, HTTP, HTTPS (SSL), and FTP.&lt;/li&gt;&lt;li&gt;Basic experience working in Linux environments.&lt;/li&gt;&lt;li&gt;Familiarity with API and web service technologies such as REST, JSON, and OAuth.&lt;/li&gt;&lt;li&gt;Prior experience supporting resellers or working with third-party vendors.&lt;/li&gt;&lt;li&gt;Exposure to incident management, on-call 
rotations, or SRE/DevOps practices.&lt;/li&gt;&lt;li&gt;Secondary degree or relevant technical certifications.&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809469/senior-technical-support-engineer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809469/senior-technical-support-engineer-at-infobeans/</link>
  <title>[Full Time] Senior Technical Support Engineer at Infobeans</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809540/sr-power-bi-developer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Must have skills:&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;1. &lt;strong&gt;Power BI&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Visualization Produce compelling and informative visualizations using various native and custom chart types. Create and maintain relationships between visuals, filters, bookmarks, and numeric/field parameters. Design strategic visual interactions that enhance the end-users experience using cross- filtering and cross- highlighting. &lt;/li&gt;&lt;li&gt;DAX Language Write and optimize DAX expressions to create measures and calculated columns. Familiarity with common filtering functions (CALCULATE, FILTER, etc.) and iteration functions (SUMX, AVERAGEX, etc.) &lt;/li&gt;&lt;li&gt;Modeling Create effective data models. Maintain relationship cardinality and cross- filtering between tables. Understand the use cases for Import, DirectQuery, Dual, and Live data storage modes. &lt;/li&gt;&lt;li&gt;Publishing Manage online deployment pipelines to test and publish Power BI reports. Manage user roles and implement row-level security to restrict data access. &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;2. Power Query&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Preparation Clean, transform, reshape, and aggregate data from different sources such as Excel, SQL Server, SharePoint, etc. Create dynamic, reusable queries using parameters. &lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;M Language Knowledge of different data types and data structures like values, records, tables, lists, etc. Familiarity with built-in functions and the ability to write custom functions. Understand native query folding to optimize performance. &lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;3. SQL Experience writing and optimizing queries. 
Strong understanding of relational databases&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;Design &amp;amp; develop PowerBI dashboards and reports&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Experience: &lt;/strong&gt;&lt;span&gt;Min 8 years (Preferrable 10+ years of experience)&lt;/span&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Analyst&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809540/sr-power-bi-developer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809540/sr-power-bi-developer-at-infobeans/</link>
  <title>[Full Time] Sr. Power BI Developer at Infobeans</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808798/senior-software-engineer-informatica-developer-at-cgi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Job Title: SSE&lt;br&gt;Position: Senior Software Engineer&lt;br&gt; Experience: 6- 8 Years &lt;br&gt;Category: Development&lt;br&gt;Main location: Hyderabad&lt;br&gt;Shift: 11am to 8pm&lt;br&gt;Work model: Initial 8 weeks WFO, continued with hybrid (3 Days in a week)Education Qualification: &lt;br&gt;Bachelor s degree in related field or higher with minimum 4 years of relevant experience.&lt;/p&gt; &lt;br&gt; &lt;br&gt; &lt;br&gt; &lt;div&gt; &lt;br&gt; Your future duties and responsibilities: &lt;br&gt; &lt;p&gt;Development experience (6-8 Years) with strong ETL(Informatica) Skillset with good knowledge on Teradata and Oracle database&lt;br&gt; Candidate must have SAS experience.&lt;br&gt; Good knowledge of Unix, SQL, PL-SQL is required.&lt;br&gt; CA7 scheduling tool knowledge is good to have.&lt;br&gt; Prepares requirement definition, design, technical specifications.&lt;br&gt; Provides coding, testing and implementation support for identified technical platform (i.e., Mainframe, Mid-range, Distributed or Web).&lt;br&gt; Analyzes user requirements, and defines technical project scope and assumptions for assigned tasks.&lt;br&gt; Creates business and/or technical designs for new systems, and/or modifications to existing systems&lt;/p&gt; &lt;br&gt; &lt;/div&gt;&lt;br&gt; &lt;br&gt; &lt;div&gt; &lt;br&gt; Skills: &lt;br&gt; &lt;ul&gt; &lt;li&gt;Informatica&lt;/li&gt; &lt;li&gt;Oracle&lt;/li&gt; &lt;li&gt;SAS Enterprise Guide&lt;/li&gt; &lt;li&gt;SQL&lt;/li&gt; &lt;li&gt;Unix&lt;/li&gt; &lt;/ul&gt; &lt;br&gt; &lt;/div&gt;&lt;br&gt; &lt;br&gt; &lt;div&gt; &lt;br&gt; What you can expect from us: &lt;br&gt; &lt;p&gt; &lt;strong&gt;Together, as owners, let s turn meaningful insights into action.&lt;/strong&gt; Life at CGI is rooted in ownership, teamwork, respect and belonging. 
Here, you ll reach your full potential because You are invited to be an owner from day 1 as we work together to bring our Dream to life. That s why we call ourselves CGI Partners rather than employees. We benefit from our collective success and actively shape our company s strategy and direction.Your work creates value. You ll develop innovative solutions and build relationships with teammates and clients while accessing global capabilities to scale your ideas, embrace new opportunities, and benefit from expansive industry and technology expertise.You ll shape your career by joining a company built to grow and last. You ll be supported by leaders who care about your health and well-being and provide you with opportunities to deepen your skills and broaden your horizons. Come join our team one of the largest IT and business consulting services firms in the world.&lt;/p&gt; &lt;br&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;CGI&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808798/senior-software-engineer-informatica-developer-at-cgi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808798/senior-software-engineer-informatica-developer-at-cgi/</link>
  <title>[Full Time] Senior Software Engineer-Informatica Developer at CGI</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808976/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:&lt;li&gt;Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.&lt;/li&gt;&lt;li&gt;Excellent programming and debugging skills in Python.&lt;/li&gt;&lt;li&gt;Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.&lt;/li&gt;&lt;li&gt;Must be proficient in at least one cloud platform:AWS, GCP, or Azure.&lt;/li&gt;&lt;li&gt;Create modular DBX functions for transformation, PII masking, and validation logic &quot; reusable across DLT and notebook pipelines.&lt;/li&gt;&lt;li&gt;Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.&lt;/li&gt;&lt;li&gt;Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.&lt;/li&gt;&lt;li&gt;Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.&lt;/li&gt;&lt;li&gt;Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.&lt;/li&gt;&lt;li&gt;Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.&lt;/li&gt;&lt;li&gt;Collaborate with data sharing stakeholders to implement Delta Sharing &quot; both internally and externally.&lt;/li&gt;&lt;li&gt;Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.&lt;/li&gt;&lt;li&gt;Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.&lt;/li&gt;&lt;li&gt;Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.&lt;/li&gt;&lt;li&gt;Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or 
GitLab.&lt;/li&gt;&lt;li&gt;Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.&lt;/li&gt;&lt;li&gt;Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.&lt;/li&gt;&lt;li&gt;Hands on Experience in applying Performance optimization techniques&lt;/li&gt;&lt;li&gt;Understanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:&lt;/li&gt;&lt;li&gt;15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a 
href=&quot;https://ineojobs.com/job/808976/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808976/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809109/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Google Cloud Platform Architecture&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;15&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will design and deliver end-to-end data architecture solutions for platforms, products, or engagements on Google Cloud. You will define architectures that meet performance, scalability, security, and compliance requirements while ensuring data integrity and accessibility. 
You will be responsible for the successful implementation of data solutions that align with business strategy.Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be a Subject Matter Expert (SME) with deep expertise in Google Cloud data architecture.&lt;/li&gt;&lt;li&gt;Provide strategic guidance, influencing architectural decisions across multiple teams.&lt;/li&gt;&lt;li&gt;Collaborate with stakeholders to define data strategies, roadmaps, and governance models.&lt;/li&gt;&lt;li&gt;Design enterprise-grade data architectures supporting analytics, AI/ML, and operational workloads.&lt;/li&gt;&lt;li&gt;Ensure solutions adhere to best practices for security, performance, and cost optimization.&lt;/li&gt;&lt;li&gt;Lead the implementation of data architecture frameworks and reference models.&lt;/li&gt;&lt;li&gt;Guide teams on data migration, integration, and modernization initiatives.Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;/li&gt;&lt;li&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Expertise in Google Cloud data services (BigQuery, Cloud Storage, Pub/Sub, Dataflow, Dataproc, etc.).&lt;/li&gt;&lt;li&gt;Strong knowledge of data architecture principles, data modeling, and data governance.&lt;/li&gt;&lt;li&gt;Proven experience in designing scalable, high-performance, and secure cloud-based data platforms.&lt;/li&gt;&lt;li&gt;Hands-on experience with data ingestion, ETL/ELT, streaming, and batch processing.&lt;/li&gt;&lt;li&gt;Familiarity with compliance frameworks and data security best practices in cloud environments.Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have a minimum of 16 years of experience in data architecture, with a strong focus on Google Cloud.&lt;/li&gt;&lt;li&gt;This position is based Pan India&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; 
Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809109/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809109/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809543/sap-developer-lead-at-infogain/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;p&gt; &lt;strong&gt;Must Have:&lt;/strong&gt; &lt;/p&gt; &lt;p&gt; 8+ years of experience in L1/L2 support within SAP Commerce (Hybris) environment&lt;/p&gt; &lt;p&gt; Practical experience with Hybris Backoffice, working on Impex and Hybris Administration Console (HAC).&lt;/p&gt; &lt;p&gt; Experience monitoring and analysing system performance, utilizing logging and monitoring tools.&lt;/p&gt; &lt;p&gt; Execute &lt;strong&gt;minor fixes&lt;/strong&gt; such as Impex corrections, configuration updates, and property adjustments as and when needed.&lt;/p&gt; &lt;p&gt; Proficiency in handling Impex files for data imports/exports and troubleshooting data-related issues.&lt;/p&gt; &lt;p&gt; Experience with troubleshooting of server-side issues using HAC and Hybris logs.&lt;/p&gt; &lt;p&gt; Strong communication skills, with the ability to explain technical concepts to non-technical users and teams.&lt;/p&gt; &lt;p&gt; Familiarity with service management tools like JIRA for tracking incidents and requests.&lt;/p&gt; &lt;p&gt; Ability to prioritize and manage multiple incidents or service requests in a fast-paced environment.&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Excellent verbal and written communication skills with the ability to communicate with all levels of the organization, from senior management to staff level teams.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Extreme responsiveness with ability to work under pressure in a crisis, maintaining a clear sense of urgency&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Superior work ethic with a positive, can-do attitude.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Excellent customer service skills and superior telephone etiquette.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Demonstrated attention to detail and excellent time management.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Participate in on-call rotation and incident postmortems to ensure continuous 
improvement.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Superior collaboration skills.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Flexibility with the ability to change priorities quickly, focus on new ones without distraction.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Ability to deal with conflict and work under pressure to meet deliverable commitments.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Technical aptitude and a passion for learning about new emerging technologies.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Ability and willingness to travel internationally and adjust work hours to accommodate international teams. &lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;strong&gt;Mandatory Technology Stack:&lt;/strong&gt; &lt;/p&gt; &lt;p&gt; Platform: SAP Commerce (Hybris) - majorly (Hybris Backoffice, Hybris Administration Console (HAC))&lt;/p&gt; &lt;p&gt; Monitoring tools like Google Cloud Logging, Zabbix, Splunk, Graylogs etc.&lt;/p&gt; &lt;p&gt; Strong knowledge of databases (SQL) preferably MySQL&lt;/p&gt; &lt;p&gt; Git, Bamboo, Jenkins&lt;/p&gt; &lt;p&gt; Incident management tools like Jira or ServiceNow&lt;/p&gt; &lt;p&gt; Cloud Platforms (Preferred): Google Cloud&lt;/p&gt; &lt;p&gt; Prior experience in ECommerce solutions is a must.&lt;/p&gt; &lt;p&gt; &lt;strong&gt;Good to have:&lt;/strong&gt; &lt;/p&gt; &lt;p&gt;Experience in handling both web and mobile app support projects&lt;/p&gt; &lt;p&gt;Experience in mobile app related tools like Firebase, Flutter, Mobile APIs etc&lt;/p&gt; &lt;p&gt;Experience with projects focused on both infrastructures and software solutions.&lt;/p&gt; &lt;p&gt;Knowledge of different languages like French, Spanish, Korean and Mandarin is a plus.&lt;/p&gt; &lt;/div&gt; EXPERIENCE &lt;br&gt; &lt;ul&gt;&lt;br&gt; &lt;li&gt;8-11 Years&lt;/li&gt;&lt;br&gt; &lt;/ul&gt;&lt;br&gt; SKILLS &lt;br&gt; &lt;ul&gt;&lt;br&gt; &lt;li&gt;Primary Skill: SAP Functional&lt;/li&gt; &lt;li&gt;Sub Skill(s): SAP Functional &lt;/li&gt;&lt;br&gt; 
&lt;br&gt; &lt;li&gt;Additional Skill(s): SAP Functional - CS, SAP Hybris Functional, SAP Hybris Technical, SAP Technical, SAP Hybris Testing, SAP Hybris Development&lt;/li&gt;&lt;br&gt; &lt;br&gt; &lt;/ul&gt;&lt;br&gt; ABOUT THE COMPANY &lt;br&gt; &lt;span&gt;&lt;br&gt; &lt;p&gt;Infogain is a human-centered digital platform and software engineering company based out of Silicon Valley. We engineer business outcomes for Fortune 500 companies and digital natives in the technology, healthcare, insurance, travel, telecom, and retail &amp;amp; CPG industries using technologies such as cloud, microservices, automation, IoT, and artificial intelligence. We accelerate experience-led transformation in the delivery of digital platforms. Infogain is also a Microsoft (NASDAQ: MSFT) Gold Partner and Azure Expert Managed Services Provider (MSP).&lt;/p&gt;&lt;br&gt;&lt;p&gt;Infogain, an Apax Funds portfolio company, has offices in California, Washington, Texas, the UK, the UAE, and Singapore, with delivery centers in Seattle, Houston, Austin, Kraków, Noida, Gurgaon, Mumbai, Pune, and Bengaluru.&lt;/p&gt;&lt;br&gt; &lt;/span&gt;&lt;br&gt; &lt;br&gt; &lt;br&gt; &lt;br&gt; &lt;br&gt; &lt;div&gt;&lt;br&gt; &lt;div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infogain&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809543/sap-developer-lead-at-infogain/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809543/sap-developer-lead-at-infogain/</link>
  <title>[Full Time] SAP Developer (Lead) at Infogain</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809029/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Analytics Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various teams to ensure that data flows seamlessly and efficiently throughout the organization, while also addressing any challenges that arise in the data management process. Your role will be pivotal in shaping the data landscape of the organization, enabling informed decision-making and strategic planning. 
Roles &amp;amp; Responsibilities:A.Function as the Lead Data Architect for a small, simple project/proposal or as a team lead for medium/large sized project or proposalB.Discuss specific Big data architecture and related issues with client architect/team (in area of expertise)C.Analyze and assess the impact of the requirements on the data and its lifecycleD.Lead Big data architecture and design medium-big Cloud based, Big Data and Analytical Solutions using Lambda architecture.E.Breadth of experience in various client scenarios and situationsF.Experienced in Big Data Architecture-based sales and deliveryG.Thought leadership and innovationH.Lead creation of new data assets &amp;amp; offeringsI.Experience in handling OLTP and OLAP data workloads Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;li&gt;A.Strong experience in Azure is preferred with hands-on experience in two or more of these skills :Azure Synapse Analytics, Azure HDInsight, Azure Databricks with PySpark / Scala / SparkSQL, Azure Analysis ServicesB.Experience in one or more Real-time/Streaming technologies including:Azure Stream Analytics, Azure Data Explorer, Azure Time Series Insights, etc.C.Experience in handling medium to large Big Data implementationsD.Candidate must have around 5 years of extensive Big data experienceE.Candidate must have 15 years of IT experience and around 5 years of extensive Big data experience (design + build) &lt;br&gt;Additional Information:A.Should be able to drive the technology design meetings, propose technology design and architecture B.Should have excellent client communication skillsC.Should have good analytical and problem-solving skills&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: 
&lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809029/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809029/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808776/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt;Project Role Description :&lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt; &lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;2&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As an Application Developer, you will be responsible for designing, building, and configuring applications to meet business process and application requirements. You will play a crucial role in developing solutions to enhance business operations and efficiency. &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt;- Expected to perform independently and become an SME.- Required active participation/contribution in team discussions.- Contribute in providing solutions to work-related problems.- Collaborate with cross-functional teams to design and develop applications.- Implement best practices for application development.- Troubleshoot and debug applications to ensure optimal performance.- Stay updated with the latest technologies and trends in application development.- Provide technical guidance and mentorship to junior team members. 
&lt;br&gt;&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt;- &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Microsoft Azure Data Services.- Strong understanding of cloud-based application development.- Experience with data storage and management in Azure environment.- Knowledge of Azure DevOps for continuous integration and deployment.- Hands-on experience in building scalable and secure applications on Azure platform. &lt;b&gt;Additional Information:&lt;/b&gt;- The candidate should have a minimum of 3 years of experience in Microsoft Azure Data Services.- This position is based at our Bengaluru office.- A 15 years full-time education is required.&lt;br&gt; Qualification15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808776/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808776/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>2026-01-22T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808037/operations-engineer-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role &lt;/b&gt;Operations Engineer&lt;br&gt;&lt;b&gt;Project Role Description &lt;/b&gt;Support the operations and/or manage delivery for production systems and services based on operational requirements and service agreement. &lt;br&gt;&lt;b&gt;Must have skills &lt;/b&gt;Microsoft SQL Server Administration, Database Architecture, Oracle Database Administration (DBA)&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills &lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;12&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification &lt;/b&gt;15 years full time education&lt;br&gt;&lt;b&gt;Summary&lt;/b&gt;As an Operations Engineer, you will support the operations and manage delivery for production systems and services based on operational requirements and service agreements. Your typical day will involve monitoring system performance, troubleshooting issues, and collaborating with various teams to ensure seamless operations. You will also engage in planning and executing maintenance activities, ensuring that all systems are running optimally and in compliance with established protocols. Your role will require you to be proactive in identifying potential issues and implementing solutions to enhance system reliability and efficiency. &lt;br&gt;Roles &amp;amp; Responsibilities:&lt;br&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Expected to provide solutions to problems that apply across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Monitor system performance metrics and implement improvements. 
&lt;br&gt;Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt; &lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt; Proficiency in Microsoft SQL Server Administration, Database Architecture, Oracle Database Administration (DBA).&lt;/li&gt;&lt;li&gt;Experience with performance tuning and optimization of SQL queries.&lt;/li&gt;&lt;li&gt;Strong understanding of backup and recovery strategies for SQL Server.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based database solutions and their management.&lt;/li&gt;&lt;li&gt;Ability to implement security measures for database systems. &lt;br&gt;Additional Information:&lt;br&gt; &lt;/li&gt;&lt;li&gt;The candidate should have minimum 12 years of experience in Microsoft SQL Server Administration.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808037/operations-engineer-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808037/operations-engineer-at-accenture-hr-aditi/</link>
  <title>[Full Time] Operations Engineer at Accenture HR Aditi</title>
  <dc:date>2026-01-22T01:03:23+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807860/senior-database-administrator-at-sunquest-information/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Clinisys is a global provider of intelligent diagnostic informatics solutions and expertise designed to redefine the modern laboratory, across healthcare, life sciences, and public health. &lt;/li&gt; &lt;li&gt; Millions of diagnostic results and data insights are generated every day using Clinisysplatform and cloudbased solutions in over 3,000 laboratories across 34 countries. &lt;/li&gt; &lt;li&gt; Headquartered in Tucson, Arizona, and Woking, England, Clinisysmission is to enhance the effectiveness of diagnostic workflows in any laboratory or testing environment and keep citizens and communities healthier and safer. &lt;/li&gt; &lt;li&gt; Purpose. &lt;/li&gt; &lt;li&gt; As a Senior Database Engineer, you will be responsible for the design, implementation, maintenance, and optimization of complex database systems. &lt;/li&gt; &lt;li&gt; This includes ensuring data integrity, security, performance, and availability, leading projects, mentoring junior DBAs, and making strategic decisions regarding database architecture and infrastructure within Clinisys. &lt;/li&gt; &lt;li&gt; You will work closely with data architects, software architects, software engineers, and other business stakeholders to ensure that our database designs employ best practices of schema design and optimization in the context of each product application. &lt;/li&gt; &lt;li&gt; This role requires a proactive and resourceful individual with a solid understanding of database and schema design, performance tuning, production application development, and cloud architecture. &lt;/li&gt; &lt;li&gt; Essential Functions/ Job Responsibilities. &lt;/li&gt; &lt;li&gt; Administer, maintain, and enhance database systems supporting SaaS applications. &lt;/li&gt; &lt;li&gt; Ensure high availability, performance, and security of database environments. 
&lt;/li&gt; &lt;li&gt; Troubleshoot and resolve complex database issues across multiple platforms, including Oracle, SQL Server, and Sybase, ensuring minimal downtime and quick resolution. &lt;/li&gt; &lt;li&gt; Identify and resolve performance bottlenecks, deadlocks, and query execution issues. &lt;/li&gt; &lt;li&gt; Diagnose issues related to replication, data corruption, and storage optimization. &lt;/li&gt; &lt;li&gt; Utilize advanced diagnostic tools and methodologies to uncover root causes. &lt;/li&gt; &lt;li&gt; Develop and execute comprehensive backup and recovery strategies. &lt;/li&gt; &lt;li&gt; Collaborate with software development teams to integrate database solutions with applications. &lt;/li&gt; &lt;li&gt; Monitor database performance and implement improvements as needed. &lt;/li&gt; &lt;li&gt; Manage database access, roles and security policies. &lt;/li&gt; &lt;li&gt; Stay up to date with the latest cloud-managed database technologies and best practices. &lt;/li&gt; &lt;li&gt; Skills Needed To Be Successful. &lt;/li&gt; &lt;li&gt; Proven experience as a Database Administrator or Engineer, with a focus on Oracle databases, schema design, and product development. &lt;/li&gt; &lt;li&gt; Experience with monitoring with Oracle Enterprise Manager. &lt;/li&gt; &lt;li&gt; Experience with DataGuard and Oracle DR/BC configuration. &lt;/li&gt; &lt;li&gt; Proficiency in SQL and NoSQL databases. &lt;/li&gt; &lt;li&gt; Experience with healthcare and/or laboratory software is highly desired. &lt;/li&gt; &lt;li&gt; Ensuring compliance with data privacy regulations (GDPR, HIPAA) and industry standards. &lt;/li&gt; &lt;li&gt; Experience with cloud database platforms; Azure managed database experience is highly desired. &lt;/li&gt; &lt;li&gt; Experience in implementing high-availability solutions such as clustering, mirroring, and replication. &lt;/li&gt; &lt;li&gt; Experience with high traffic and highly available website architectures and operations. 
&lt;/li&gt; &lt;li&gt; Knowledge and experience preferred with AI-driven development and automation tools such as GitHub Copilot, Copilot Studio, and related platforms to optimize database administration tasks, implement intelligent query tuning, and enhance operational efficiency. &lt;/li&gt; &lt;li&gt; Demonstrated understanding of AI concepts including Model Context Protocol (MCP), Prompt Engineering, and integration of AI-assisted workflows into DevOps and CI/CD pipelines. &lt;/li&gt; &lt;li&gt; Required Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Bachelor&apos;s degree in business or computer science, or equivalent education and work experience combined. &lt;/li&gt; &lt;li&gt; Minimum 5 to 7 years of Oracle database administration and Oracle Data Guard experience. &lt;/li&gt; &lt;li&gt; Familiarity with database monitoring tools and automation techniques for proactive issue resolution. &lt;/li&gt; &lt;li&gt; Strong understanding of software product development and related processes. &lt;/li&gt; &lt;li&gt; Excellent problem-solving and analytical skills. &lt;/li&gt; &lt;li&gt; Strong communication and collaboration skills. &lt;/li&gt; &lt;li&gt; Preferred Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Database certification(s) preferred. &lt;/li&gt; &lt;li&gt; Prior experience with server class hardware, operating systems, and virtualization. &lt;/li&gt; &lt;li&gt; Supervisory Responsibilities. &lt;/li&gt; &lt;li&gt; None. 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sunquest Information&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807860/senior-database-administrator-at-sunquest-information/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807860/senior-database-administrator-at-sunquest-information/</link>
  <title>[Full Time] Senior Database Administrator at Sunquest Information</title>
  <dc:date>2026-01-21T15:40:54+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807791/ibm-datastage-developer-at-capgemini/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt;Your Role&lt;/b&gt; &lt;/b&gt;&lt;br&gt;&lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Design, develop, and maintain ETL processes using DataStage (must-have) and Talend (good to have).&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Ensure efficient extraction, transformation, and loading of data from various sources into Big Data/Data Warehouse.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Responsible for Linux and Shell scripting, along with SQL &amp;amp; PL/SQL.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Collaborate with stakeholders including data modelers and business analysts to understand requirements and translate them into technical specifications/solutions.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Work in Agile methodology using JIRA.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt;Your Profile&lt;/b&gt; &lt;/b&gt;&lt;br&gt;&lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Design, develop, and implement ETL pipelines using DataStage and Talend.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Optimize ETL pipelines for performance and scalability.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Maintain documentation for ETL processes, workflows, and system architecture.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Troubleshoot and resolve code issues and errors.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Demonstrate strong analytical and problem-solving skills.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Proficient in SQL and experienced with RDBMS like Oracle and Snowflake.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Familiar with data modeling concepts and tools/techniques.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Skilled in Shell scripting and Linux.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; 
&lt;div&gt; &lt;b&gt; &lt;b&gt;&lt;/b&gt;&lt;/b&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capgemini&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807791/ibm-datastage-developer-at-capgemini/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807791/ibm-datastage-developer-at-capgemini/</link>
  <title>[Full Time] IBM Datastage developer at Capgemini</title>
  <dc:date>2026-01-21T14:04:29+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809017/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;3&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the architecture aligns with business needs and technical specifications. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also addressing any challenges that arise during the development process. Your role will be pivotal in establishing a robust data framework that supports the overall objectives of the organization. Roles &amp;amp; Responsibilities&lt;li&gt;Expected to perform independently and become an SME.&lt;/li&gt;&lt;li&gt;Required active participation/contribution in team discussions.&lt;/li&gt;&lt;li&gt;Contribute in providing solutions to work related problems.&lt;/li&gt;&lt;li&gt;Engage in continuous learning to stay updated with industry trends and best practices.&lt;/li&gt;&lt;li&gt;Collaborate with cross-functional teams to ensure data architecture meets business needs. 
Professional &amp;amp; Technical &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;br&gt;&lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with data integration tools and ETL processes.&lt;/li&gt;&lt;li&gt;Familiarity with cloud-based data storage solutions and architectures.&lt;/li&gt;&lt;li&gt;Ability to design and implement data governance frameworks. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 3 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809017/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809017/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-21T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808979/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808979/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808979/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-21T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809028/data-architect-at-accenture-hr-aditi/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Databricks Unified Data Analytics Platform&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;UNIX, Sun Solaris, HP UX, IBM &lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with the overall business objectives and technical specifications. You will collaborate with various teams to ensure that the data architecture is robust, scalable, and efficient, while also addressing any challenges that arise during the development process. Your role will be pivotal in shaping the data landscape of the organization, enabling data-driven decision-making and fostering innovation through effective data management practices. 
Responsibilities:Develop high-quality, scalable ETL/ELT pipelines using Databricks technologies including Delta Lake, Auto Loader, and DLT.Excellent programming and debugging skills in Python.Strong hands-on experience with Py Spark to build efficient data transformation and validation logic.Must be proficient in at least one cloud platform:AWS, GCP, or Azure.Create modular DBX functions for transformation, PII masking, and validation logic reusable across DLT and notebook pipelines.Implement ingestion patterns using Auto Loader with checkpointing and schema evolution for structured and semi-structured data.Build secure and observable DLT pipelines with DLT Expectations, supporting Bronze/Silver/Gold medallion layering.Configure Unity Catalog:set up catalogs, schemas, user/group access, enable audit logging, and define masking for PII fields.Enable secure data access across domains and workspaces via Unity Catalog External Locations, Volumes, and lineage tracking.Access and utilize data assets from the Databricks Marketplace to support enrichment, model training, or benchmarking.Collaborate with data sharing stakeholders to implement Delta Sharing both internally and externally.Integrate Power BI/Tableau/Looker with Databricks using optimized connectors (ODBC/JDBC) and Unity Catalog security controls.Build stakeholder-facing SQL Dashboards within Databricks to monitor KPIs, data pipeline health, and operational SLAs.Prepare Gen AI-compatible datasets:manage vector embeddings, index with Databricks Vector Search, and use Feature Store with ML flow.Package and deploy pipelines using Databricks Asset Bundles through CI/CD pipelines in GitHub or GitLab.Troubleshoot, tune, and optimize jobs using Photon engine and serverless compute, ensuring cost efficiency and SLA reliability.Experience with cloud-based services relevant to data engineering, data storage, data processing, data warehousing, real-time streaming, and serverless computing.Hands on Experience in applying 
Performance optimization techniquesUnderstanding data modeling and data warehousing principles is essential.Nice to Have:1.Certifications:Databricks Certified Professional or similar certifications.2.Machine Learning:Knowledge of machine learning concepts and experience with popular ML libraries.3.Knowledge of big data processing (e.g., Spark, Hadoop, Hive, Kafka)4.Data Orchestration:Apache Airflow.5.Knowledge of CI/CD pipelines and DevOps practices in a cloud environment.6.Experience with ETL tools like Informatica, Talend, Mati Llion, or Five Tran.7.Familiarity with DBT (Data Build Tool)Additional Information:- The candidate should have minimum 7.5 years of experience in Databricks Unified Data Analytics Platform.- This position is based at our Bengaluru office.&lt;br&gt;Educational Qualification:- 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809028/data-architect-at-accenture-hr-aditi/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809028/data-architect-at-accenture-hr-aditi/</link>
  <title>[Full Time] Data Architect at Accenture HR Aditi</title>
  <dc:date>2026-01-21T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809494/database-administrator-at-sunquest-information/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;Purpose: &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; In this role you will get the opportunity to work with Clinisys which is a global leader in healthcare information technology. As a part of the Solution Adoption team, you will act as a liaison between the business and its customers and will be responsible to deliver impeccable service by providing high quality software solutions. As an Oracle DBA you will perform essential database functions from development through production including but not limited to database installations, upgrades, troubleshooting, and conversions for both client based and cloud hosted products. Additionally, in this role you will be accountable for documentation for all database functions. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Essential Functions / Job Responsibilities: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Install Oracle database patches and troubleshoot various issues related to database patching. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Perform production database health check. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Detect potential issues that may impact production database operation and resolve the issues. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Performs database support for both production and development environments, including the installation of databases, back-ups, replication, restores, patches, and upgrades. 
&lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Works closely with product development teams to create and manage product databases, solve problems, and optimize performance. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Provide product database support for client installations and knowledge of applying security patches on Oracle Databases. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Ability to configure replication as a part of disaster recovery process. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Identify and resolve database issues that impact application performance. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Follows appropriate sign-off and escalation procedures for database changes and version upgrades. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Document database installations, replications, upgrades, and conversions. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Stay ahead of the latest database versions and features. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Qualify latest database versions for application use and recommend and implement emerging database technologies. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Craft and deliver training materials as assigned. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Prepare reports or correspondence concerning project specifications, activities, or status. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; All other duties and responsibilities as assigned. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong command of Oracle, Oracle server tools and Oracle Data Guard. 
&lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Advanced knowledge of database security, backup and recovery, performance monitoring and tuning standards. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Understanding of relational and dimensional data modelling. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong mathematical and statistical knowledge. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Impeccable attention to detail. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Develop processes for optimizing database security. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Create and manage database reports, visualizations, and dashboards. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Create automation for repeating database tasks. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Be available for on-call support as needed. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Skills needed to be successful &lt;/strong&gt; : &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Deep knowledge of Oracle servers/databases and Oracle database development and Oracle Data Guard. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Deep knowledge in both OLTP and OLAP database designs and administration. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Proven System Administration skills. 
&lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Innovative thinker with a commitment to improve processes and methodologies &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Strong written and verbal communications skills. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Excellent client management &amp;amp; client service skills &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; Create/Maintain Oracle Database Projects for development. &lt;/span&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; Knowledge and experience preferred with AI-driven development and automation tools such as GitHub Copilot, Copilot Studio, and related platforms to optimize database administration tasks, implement intelligent query tuning, and enhance operational efficiency. Demonstrated understanding of AI concepts including Model Context Protocol (MCP), Prompt Engineering, and integration of AI-assisted workflows into DevOps and CI/CD pipelines. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Required Experience &amp;amp; Education: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Bachelors degree in business or computer science, or equivalent education and work experience combined. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Minimum 3 to 5 years of Oracle database administration and Oracle Data Guard experience. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Prior exposure to software development processes and/or methodologies. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Familiarity with on premise and cloud-based implementations. 
&lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Preferred Experience &amp;amp; Education: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Database certification(s) preferred. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; Prior experience with server class hardware, operating systems, and virtualization. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Supervisory Responsibilities &lt;/strong&gt; : &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; None &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sunquest Information&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809494/database-administrator-at-sunquest-information/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809494/database-administrator-at-sunquest-information/</link>
  <title>[Full Time] Database Administrator at Sunquest Information</title>
  <dc:date>2026-01-21T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807806/semantic-ai-ml-architect-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt;We are seeking a Senior Semantic Engineer to design and implement semantic data frameworks that provide a shared structure for enterprise data. In this role you will focus on building and maintaining ontologies and knowledge graphs, enforcing semantic validation rules for data quality, and collaborating with AI teams to integrate these semantic structures into intelligent applications. The position is industry-agnostic, emphasizing strong semantic web expertise and the ability to apply it in any enterprise context. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Ontology Design Maintenance: Design, develop, and maintain ontologies (using OWL/RDF or similar) that model key enterprise data domains and relationships, ensuring a consistent and shared data vocabulary across the organization. This includes collaborating with domain experts to capture real-world concepts and validate that the ontology accurately represents business knowledge. &lt;/li&gt;&lt;li&gt;Knowledge Graph Development: Build and manage enterprise knowledge graphs based on the defined ontologies, linking diverse data sources into a unified graph data model. This involves configuring graph databases or triple stores, populating the knowledge graph with data (RDF triples), and optimizing it for query performance and scalability. &lt;/li&gt;&lt;li&gt;Semantic Querying (SPARQL): Create and optimize SPARQL queries to enable efficient retrieval, integration, and analysis of data from the knowledge graph. You will develop semantic queries and endpoints that support advanced search and analytics use cases, making it easier for others to retrieve insights from linked data. 
&lt;/li&gt;&lt;li&gt;Validation Rules Data Quality: Implement semantic validation rules and consistency checks (e.g., using SHACL or OWL constraints) to ensure data integrity and quality within the ontology and knowledge graph. You will define and enforce data modelling conventions and business rules so that enterprise data conforms to the ontology&apos;&apos;s standards and remains interoperable across systems. &lt;/li&gt;&lt;li&gt;Integration with Enterprise Systems: Work closely with software engineers, data architects, and IT teams to integrate the ontology and knowledge graph into the organization&apos;&apos;s existing data infrastructure and workflows. This includes embedding semantic models in data pipelines, APIs, and databases, so that enterprise applications can produce and consume linked data seamlessly. &lt;/li&gt;&lt;li&gt;Collaboration Cross-Functional Support: Collaborate with cross-functional teams and stakeholders. For example, partner with AI/ML teams to incorporate the knowledge graph into AI-driven solutions, and team up with business analysts or data stewards to align the semantic models with business needs. You will communicate semantic concepts to non-technical stakeholders, providing training or documentation to ensure adoption of the semantic framework across the organisation. &lt;/li&gt;&lt;li&gt;Integration with AI Agents: Work with AI agents and large language model (LLM) teams to leverage the ontology and knowledge graph for intelligent applications. For instance, you might enable an AI chatbot to use the knowledge graph for more context-aware responses, or develop mechanisms for AI systems to perform reasoning over the ontologies. This responsibility ensures that semantic data structures enhance AI initiatives (e.g. improving context, disambiguation, and knowledge retrieval in AI workflows). &lt;/li&gt;&lt;li&gt;Standards Best Practices: Stay current with emerging semantic web standards, tools, and best practices. 
Continuously improve the semantic architecture by adopting relevant metadata standards and ensuring alignment with industry best practices for ontologies and knowledge graphs. You will also contribute to establishing internal guidelines and best practices for semantic data management, promoting a culture of well-structured, semantically-rich data across the enterprise. &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Must have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Ontology Design Maintenance: Design, develop, and maintain ontologies (using OWL/RDF or similar). &lt;/li&gt;&lt;li&gt;Semantic Web Proficiency: Strong knowledge of semantic web technologies and standards &lt;/li&gt;&lt;li&gt;specifically, hands-on proficiency with OWL (Web Ontology Language) and RDF (Resource Description Framework) for ontology modelling, as well as SPARQL for querying graph data. &lt;/li&gt;&lt;li&gt;Knowledge Graph Experience: Practical experience building or maintaining knowledge graphs or linked data systems in an enterprise setting. &lt;/li&gt;&lt;li&gt;Data Modelling Integration Skills: A solid understanding of data modelling principles, data architecture, and integrating heterogeneous data sources. You should be capable of abstracting real-world entities into a semantic schema and mapping relational or NoSQL data to an ontology. &lt;/li&gt;&lt;li&gt;Programming Skills: Proficiency in at least one programming or scripting language (such as Python, Java, or similar) &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Metadata Standards: Familiarity with metadata standards and vocabularies such as Dublin Core, schema.org, or other industry-specific ontologies/taxonomies. 
Experience applying these standards to annotate or integrate data &lt;/li&gt;&lt;li&gt;AI and LLM Integration: Experience working on projects that involve AI agents or large language models, where ontologies or knowledge graphs were used to improve AI performance. &lt;/li&gt;&lt;li&gt;Enterprise System Integration: Proven experience integrating semantic technologies into existing enterprise systems or data platforms. &lt;/li&gt;&lt;li&gt;Tools Platforms: Hands-on experience with ontology and knowledge graph tools is beneficial. &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807806/semantic-ai-ml-architect-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807806/semantic-ai-ml-architect-at-luxoft/</link>
  <title>[Full Time] Semantic AI/ML Architect at Luxoft</title>
  <dc:date>2026-01-21T07:39:52+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807868/senior-database-administrator-at-sunquest-information/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Clinisys is a global provider of intelligent diagnostic informatics solutions and expertise designed to redefine the modern laboratory, across healthcare, life sciences, and public health. &lt;/li&gt; &lt;li&gt; Millions of diagnostic results and data insights are generated every day using Clinisysplatform and cloudbased solutions in over 3,000 laboratories across 34 countries. &lt;/li&gt; &lt;li&gt; Headquartered in Tucson, Arizona, and Woking, England, Clinisysmission is to enhance the effectiveness of diagnostic workflows in any laboratory or testing environment and keep citizens and communities healthier and safer. &lt;/li&gt; &lt;li&gt; Purpose. &lt;/li&gt; &lt;li&gt; As a Senior Database Engineer, you will be responsible for the design, implementation, maintenance, and optimization of complex database systems. &lt;/li&gt; &lt;li&gt; This includes ensuring data integrity, security, performance, and availability, leading projects, mentoring junior DBAs, and making strategic decisions regarding database architecture and infrastructure within Clinisys. &lt;/li&gt; &lt;li&gt; You will work closely with data architects, software architects, software engineers, and other business stakeholders to ensure that our database designs employ best practices of schema design and optimization in the context of each product application. &lt;/li&gt; &lt;li&gt; This role requires a proactive and resourceful individual with a solid understanding of database and schema design, performance tuning, production application development, and cloud architecture. &lt;/li&gt; &lt;li&gt; Essential Functions/ Job Responsibilities. &lt;/li&gt; &lt;li&gt; Administer, maintain, and enhance database systems supporting SaaS applications. &lt;/li&gt; &lt;li&gt; Ensure high availability, performance, and security of database environments. 
&lt;/li&gt; &lt;li&gt; Troubleshoot and resolve complex database issues across multiple platforms, including Oracle, SQL Server, and Sybase, ensuring minimal downtime and quick resolution. &lt;/li&gt; &lt;li&gt; Identify and resolve performance bottlenecks, deadlocks, and query execution issues. &lt;/li&gt; &lt;li&gt; Diagnose issues related to replication, data corruption, and storage optimization. &lt;/li&gt; &lt;li&gt; Utilize advanced diagnostic tools and methodologies to uncover root causes. &lt;/li&gt; &lt;li&gt; Develop and execute comprehensive backup and recovery strategies. &lt;/li&gt; &lt;li&gt; Collaborate with software development teams to integrate database solutions with applications. &lt;/li&gt; &lt;li&gt; Monitor database performance and implement improvements as needed. &lt;/li&gt; &lt;li&gt; Manage database access, roles and security policies. &lt;/li&gt; &lt;li&gt; Stay up to date with the latest cloud-managed database technologies and best practices. &lt;/li&gt; &lt;li&gt; Skills Needed To Be Successful. &lt;/li&gt; &lt;li&gt; Proven experience as a Database Administrator or Engineer, with a focus on Oracle databases, schema design, and product development. &lt;/li&gt; &lt;li&gt; Experience with monitoring with Oracle Enterprise Manager. &lt;/li&gt; &lt;li&gt; Experience with DataGuard and Oracle DR/BC configuration. &lt;/li&gt; &lt;li&gt; Proficiency in SQL and NoSQL databases. &lt;/li&gt; &lt;li&gt; Experience with healthcare and/or laboratory software is highly desired. &lt;/li&gt; &lt;li&gt; Ensuring compliance with data privacy regulations (GDPR, HIPAA) and industry standards. &lt;/li&gt; &lt;li&gt; Experience with cloud database platforms; Azure managed database experience is highly desired. &lt;/li&gt; &lt;li&gt; Experience in implementing high-availability solutions such as clustering, mirroring, and replication. &lt;/li&gt; &lt;li&gt; Experience with high traffic and highly available website architectures and operations. 
&lt;/li&gt; &lt;li&gt; Knowledge and experience preferred with AI-driven development and automation tools such as GitHub Copilot, Copilot Studio, and related platforms to optimize database administration tasks, implement intelligent query tuning, and enhance operational efficiency. &lt;/li&gt; &lt;li&gt; Demonstrated understanding of AI concepts including Model Context Protocol (MCP), Prompt Engineering, and integration of AI-assisted workflows into DevOps and CI/CD pipelines. &lt;/li&gt; &lt;li&gt; Required Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Bachelor&apos;s degree in business or computer science, or equivalent education and work experience combined. &lt;/li&gt; &lt;li&gt; Minimum 5 to 7 years of Oracle database administration and Oracle Data Guard experience. &lt;/li&gt; &lt;li&gt; Familiarity with database monitoring tools and automation techniques for proactive issue resolution. &lt;/li&gt; &lt;li&gt; Strong understanding of software product development and related processes. &lt;/li&gt; &lt;li&gt; Excellent problem-solving and analytical skills. &lt;/li&gt; &lt;li&gt; Strong communication and collaboration skills. &lt;/li&gt; &lt;li&gt; Preferred Experience &amp;amp; Education. &lt;/li&gt; &lt;li&gt; Database certification(s) preferred. &lt;/li&gt; &lt;li&gt; Prior experience with server class hardware, operating systems, and virtualization. &lt;/li&gt; &lt;li&gt; Supervisory Responsibilities. &lt;/li&gt; &lt;li&gt; None. 
&lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sunquest Information&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807868/senior-database-administrator-at-sunquest-information/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807868/senior-database-administrator-at-sunquest-information/</link>
  <title>[Full Time] Senior Database Administrator at Sunquest Information</title>
  <dc:date>2026-01-21T00:54:13+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808473/salesforce-developer-and-admin-iss-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;As a Salesforce Developer &amp;amp; Admin you will be responsible for the day-to-day operations and maintece of Salesforce and Amazon Web Services (AWS) systems. you will provide technical support to ensure a smooth customer experience. &lt;br&gt; Respond to customer inquiries in a timely manner, resolving issues with Salesforce and AWS. &lt;br&gt; Consistently document root cause analysis steps and findings, recommended solutions. &lt;br&gt; Monitor customer service areas to identify potential system issues that could impact customer experience. &lt;br&gt; Complete setup and configuration of Salesforce customer accounts and configurations. &lt;br&gt; Maintain and improve existing Salesforce and AWS applications and processes. &lt;br&gt; Implement changes to applications within Salesforce and AWS. &lt;br&gt; Develop requirements and specifications to meet customer needs. &lt;br&gt; Install, configure, and test new applications, software, and hardware in a timely manner. &lt;br&gt; Develop and execute effective system maintece plans. &lt;br&gt; Consult with stakeholders on application performance, scalability, and usability. &lt;br&gt; Ensure compliance with security protocols for Salesforce and AWS systems. &lt;br&gt; Resolve system problems efficiently. 
Bachelors degree in Computer Science, Engineering or a related field &lt;br&gt; 3+ years of Salesforce development experience in Service and Sales Cloud environments Expert-level knowledge of Apex, SOQL, Lightning Components, and Visualforce Excellent experience with Salesforce integrations (REST/SOAP APIs) Proficiency in JavaScript and Python Experience with version control systems (Git) Experience with security concepts (authentication, authorization, encryption, digital signature, SSL, web service proxies, firewall, SAML 2.0, OAuth 2.0) Experience with AWS services Salesforce certifications (Salesforce Platform Developer II certification, Application Architect, AI Specialist). &lt;br&gt; Experience working with Salesforce Marketing Cloud. &lt;br&gt; Exceptional quantitative and qualitative problem-solving skills. &lt;br&gt; Enjoys rolling up their sleeves in a fast-paced, startup environment &lt;br&gt; Ability to thrive in an ambiguous environment, and simultaneously manage multiple medium-to-large projects. &lt;br&gt; Is able to work cross-functionally with many teams and multi-tasks with a sense of urgency. &lt;br&gt; Has impeccable attention to detail, follow-through, and resourcefulness. &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808473/salesforce-developer-and-admin-iss-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808473/salesforce-developer-and-admin-iss-at-amazon/</link>
  <title>[Full Time] Salesforce Developer and Admin, ISS at Amazon</title>
  <dc:date>2026-01-20T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808284/skilled-it-warehouse-logistics-vendor-specialist-at-trigent/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Greetings from Trigent Software!!&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Hiring for Skilled IT Logistics &amp;amp; Warehouse Support Specialist&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Role: IT Logistics &amp;amp; Warehouse Support Specialist (Only male)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mode of Work: Work from Office&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Location: Nanakramguda &amp;amp; Gachibowli&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Working Days: 6 days&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift: Flexible with shifts&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Qualification: Undergraduates&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Salary: From 3 LPA to 3.75 LPA&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Responsibilities:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Handling &lt;strong&gt;IT assets like laptops, desktops, monitors, UPS, and accessories&lt;/strong&gt;&lt;/li&gt;&lt;li&gt; Performing laptop imaging and basic technical checks&lt;/li&gt;&lt;li&gt; Tracking&lt;strong&gt; IT assets in inventory systems and Excel reports&lt;/strong&gt;&lt;/li&gt;&lt;li&gt; Managing forward, reverse, and break-fix shipments&lt;/li&gt;&lt;li&gt; Coordinating with &lt;strong&gt;courier, logistics, and repair vendors&lt;/strong&gt;&lt;/li&gt;&lt;li&gt; Following up on RMAs, repairs, and replacements&lt;/li&gt;&lt;li&gt; Ensuring correct documentation for all IT asset movements&lt;/li&gt;&lt;li&gt; Conducting inventory audits and stock validation&lt;/li&gt;&lt;li&gt; Maintaining safety stock and supporting bulk shipments&lt;/li&gt;&lt;li&gt; Preparing reports and sharing updates with IT and operations teams&lt;/li&gt;&lt;li&gt; Ensuring warehouse SOPs and SLAs are followed&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Rounds of 
Interview: Screening round, Manager round &amp;amp; HR round&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Preferred Candidates Profiles:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Experience in&lt;strong&gt; IT asset management / IT logistics / warehouse operations&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Good knowledge of &lt;strong&gt;Excel, inventory tracking, and documentation&lt;/strong&gt;&lt;/li&gt;&lt;li&gt;Experience working with vendors and handling escalations&lt;/li&gt;&lt;li&gt;Able to manage data, reports, and daily operations independently&lt;/li&gt;&lt;li&gt;Comfortable working in a &lt;strong&gt;warehouse and logistics setup&lt;/strong&gt;&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Perks and Benefits:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* 2 - way cab facility&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;* Food facility&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Interested candidates can contact&lt;/strong&gt; &lt;strong&gt;HR Reena at @8072181834&lt;/strong&gt; &lt;strong&gt;or can share their resumes to&lt;/strong&gt; &lt;strong&gt;reena_s@trigent.com&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Regards,&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;HR Reena&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Trigent Software&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;8072181834&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;reena_s@trigent.com&lt;/strong&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;BPM / BPO&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;DBA / Data warehousing - Other&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Trigent&lt;/br&gt;&lt;b&gt;Location(s): 
&lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808284/skilled-it-warehouse-logistics-vendor-specialist-at-trigent/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808284/skilled-it-warehouse-logistics-vendor-specialist-at-trigent/</link>
  <title>[Full Time] Skilled - IT Warehouse, Logistics &amp; Vendor Specialist at Trigent</title>
  <dc:date>Tue, 20 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808910/senior-software-engineer-i-at-optum/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;span&gt; &lt;strong&gt; Primary Responsibilities: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Design, develop, and maintain COBOL/JCL programs to support core mainframe applications for consumer-directed healthcare accounts (HRA, FSA, HIA, RMSA). &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ensure stability, scalability, and performance of mainframe systems while adhering to high standards of quality and reliability &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Collaborate with cross-functional teams to integrate distributed architecture components and support modernization initiatives involving Java and messaging frameworks &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Analyze business requirements, translate them into technical solutions, and implement enhancements to existing systems &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Optimize workflows and batch processes using SORT JCL and other mainframe utilities &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Work with VSAM and DB2 for efficient data storage, retrieval, and transaction processing &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Utilize TSO/ISPF for code development, debugging, and system monitoring &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Participate in code reviews, testing, and deployment activities to ensure robust and maintainable solutions &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Stay updated on emerging technologies in mainframe modernization and distributed systems, introducing innovative approaches where applicable &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Provide production support and troubleshoot complex issues to maintain system uptime and reliability &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Mentor junior developers and share best practices for mainframe development and modernization &lt;/span&gt; &lt;/li&gt; &lt;li&gt; 
&lt;span&gt; Comply with the terms and conditions of the employment contract, company policies and procedures, and any and all directives (such as, but not limited to, transfer and/or re-assignment to different work locations, change in teams and/or work shifts, policies in regard to flexibility of work benefits and/or work environment, alternative work arrangements, and other decisions that may arise due to the changing business environment). The Company may adopt, vary or rescind these policies and directives in its absolute discretion and without any limitation (implied or otherwise) on its ability to do so &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Required Qualifications: &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt; Bachelor&apos;s degree in Computer Science or related field; or equivalent experience &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; &lt;span&gt; 5+ years of hands-on experience in mainframe development and maintenance &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Experience with distributed architecture and messaging queues &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Solid proficiency in COBOL, JCL, VSAM, DB2, and mainframe utilities &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Working knowledge of Java for modernization and integration projects &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Familiarity with TSO/ISPF environments and SORT JCL for batch processing &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Ability to analyze complex problems and deliver simple, effective solutions. 
&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Proven excellent communication skills and ability to work independently as an individual contributor &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt; Flexible and adaptable to changing project needs and technologies &lt;/span&gt; &amp;nbsp; &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Retail&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Optum&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808910/senior-software-engineer-i-at-optum/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808910/senior-software-engineer-i-at-optum/</link>
  <title>[Full Time] Senior Software Engineer I at Optum</title>
  <dc:date>Tue, 20 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808958/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt;&amp;nbsp;About The Role &lt;/b&gt;&amp;nbsp;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;Python (Programming Language), Data Engineering, Microsoft Power Business Intelligence (BI)&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education&lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve collaborating with various teams to model and design the application data structure, ensuring optimal storage and integration solutions are in place. You will engage in discussions to understand the data needs of the application and work towards creating a robust architecture that supports the overall goals of the project. Your role will require you to analyze existing data systems and propose enhancements to improve efficiency and effectiveness in data handling. Roles &amp;amp; Responsibilities:&lt;li&gt;Expected to be an SME.&lt;/li&gt;&lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt;&lt;li&gt;Responsible for team decisions.&lt;/li&gt;&lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt;&lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt;&lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt;&lt;li&gt;Evaluate and implement best practices in data architecture and management. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Must To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;&lt;br&gt;Proficiency in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;Good To Have &lt;b&gt;Skills:&lt;/b&gt;&lt;/li&gt;&lt;li&gt;Experience with Data Engineering, Microsoft Power Business Intelligence (BI), Python (Programming Language).&lt;/li&gt;&lt;li&gt;Strong understanding of data modeling techniques and best practices.&lt;/li&gt;&lt;li&gt;Experience with cloud-based data storage solutions and integration methods.&lt;/li&gt;&lt;li&gt;Familiarity with data governance frameworks and compliance requirements. Additional Information:&lt;/li&gt;&lt;li&gt;The candidate should have minimum 5 years of experience in Microsoft Azure Data Services.&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;b&gt;&amp;nbsp;Qualification&lt;/b&gt;&amp;nbsp;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808958/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808958/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807802/ab-initio-developer-at-capgemini/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt;Your Role&lt;/b&gt; &lt;/b&gt;&lt;br&gt;&lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;span&gt;Design, develop, and manage data ETL processes using Ab&lt;/span&gt; &lt;span&gt;?&lt;/span&gt; &lt;span&gt;Initio (GDE, Graphs, Plans, PDL), ensuring performance and reliability. &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Conduct data profiling, cleansing, and transformation across batch and real-time interfaces &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Collaborate with data architects, DBAs, QA analysts, and business teams during requirement analysis, design, and implementation phases &lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Establish best practices and standards??code reviews, documentation, optimization, automation, and monitoring. &lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt;Your Profile&lt;/b&gt; &lt;/b&gt;&lt;br&gt;&lt;/div&gt; &lt;div&gt; &lt;ul&gt; &lt;li&gt; &lt;strong&gt; &lt;span&gt;Minimum 5 years of hands-on Ab Initio development experience&lt;/span&gt; &lt;/strong&gt; &lt;span&gt; in enterprise environments.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Strong expertise with Ab Initio components: &lt;strong&gt;Rollup, Scan, Join, Partitioning, Normalize&lt;/strong&gt;.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Proven experience building and managing &lt;strong&gt;Vectors&lt;/strong&gt; and implementing &lt;strong&gt;Plans and Conduct&amp;gt;IT&lt;/strong&gt;.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Solid experience with &lt;strong&gt;PDL and meta programming&lt;/strong&gt; to create parameterized, reusable graphs.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Practical exposure to &lt;strong&gt;Express&amp;gt;IT&lt;/strong&gt; or similar orchestration tools.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Strong SQL skills and experience working with 
relational and semi-structured data.&lt;/span&gt; &lt;/li&gt; &lt;li&gt; &lt;span&gt;Excellent debugging and performance tuning skills for large data volumes.&lt;/span&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;div&gt; &lt;b&gt; &lt;b&gt;&lt;/b&gt;&lt;/b&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Capgemini&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807802/ab-initio-developer-at-capgemini/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807802/ab-initio-developer-at-capgemini/</link>
  <title>[Full Time] Ab Initio Developer at Capgemini</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807942/support-engineer-iii-just-walk-out-tech-at-amazon/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;As a Support Engineer (Technical Operations Center Engineer), you seek resolution to problems and mitigate risk, always ensuring a Customer Obsessed experience has occurred. You will be working on services with a direct impact on the customer experience. If you are excited about the opportunity to learn and work on distributed systems, enjoy trouble shooting and solving complex problems, consider the opportunities to work with Amazon Physical Stores. You will help solve a variety of challenges and offer your expertise in growing the knowledge of your peers via team collaboration. You will be counted on to identify areas of improvement and drive projects to implement them. We consistently whiteboard so be comfortable writing and supporting your ideas on the team board. You will play an active role in defining the support processes for technologies in partnership with other technology leaders within and possibly outside the team. You should be comfortable with a level of ambiguity that s higher than most projects and relish the idea of solving big challenges. You will also mentor other engineers in your area of expertise. Along the way, we guarantee that you ll work hard, have fun and impact many customers! This role requires the flexibility to work 5 days a week (occasionally on weekends) on a rotational basis. AWS Support is 24x7x365 operations and work timings for this role is in India night time i.e. 10 PM to 6 AM IST or 1 PM to 10 PM IST. You are expected to work in night shifts hours based on business requirements. About the team &lt;br&gt; Diverse Experiences &lt;br&gt; Amazon values diverse experiences. Even if you do not meet all of the preferred qualifications and skills listed in the job description, we encourage candidates to apply. If your career is just starting, hasn t followed a traditional path, or includes alternative experiences, don t let it stop you from applying. . 
&lt;br&gt; Work/Life Balance &lt;br&gt; We value work-life harmony. Achieving success at work should never come at the expense of sacrifices at home, which is why we strive for flexibility as part of our working culture. When we feel supported in the workplace and at home, there s nothing we can t achieve. &lt;br&gt; Inclusive Team Culture &lt;br&gt; AWS values curiosity and connection. Our employee-led and company-sponsored affinity groups promote inclusion and empower our people to take pride in what makes us unique. Our inclusion events foster stronger, more collaborative teams. Our continual innovation is fueled by the bold ideas, fresh perspectives, and passionate voices our teams bring to everything we do. &lt;br&gt; Mentorship and Career Growth &lt;br&gt; We re continuously raising our performance bar as we strive to become Earth s Best Employer. That s why you ll find endless knowledge-sharing, mentorship and other career-advancing resources here to help you develop into a better-rounded professional. 
2+ years of software development, or 2+ years of technical support experience &lt;br&gt; Experience scripting in modern program languages &lt;br&gt; Experience troubleshooting and debugging technical systems Knowledge of web services, distributed systems, and web application development &lt;br&gt; Experience troubleshooting maintaining hardware software RAID &lt;br&gt; Experience with REST web services, XML, JSON &lt;br&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Internet&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Amazon&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807942/support-engineer-iii-just-walk-out-tech-at-amazon/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807942/support-engineer-iii-just-walk-out-tech-at-amazon/</link>
  <title>[Full Time] Support Engineer III, Just Walk Out Tech at Amazon</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807947/senior-software-engineer-at-walmart/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;Position Summary...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt;What youll do...&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; 
&lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;b&gt;About the team:&lt;/b&gt; &lt;/p&gt; &lt;p&gt;We are developing a product that delivers end-to-end visibility into direct import flows, tracking shipments from suppliers through foreign ports and U.S. ports to import distribution centers. This tactical solution provides granular, daily insights into direct import purchase orders, enabling replenishment and sourcing managers to better synchronize supply and demand. By offering capabilities such as inventory projection, purchase order tracking, and pre-production monitoring for both direct and domestic imports, the product drives improved decision-making and delivers measurable cost savings. If you are seeking a high-impact, high-growth role with complex engineering challenges in a fast-paced environment, this opportunity is an excellent fit.&lt;/p&gt; &lt;p&gt; &lt;b&gt;What You ll Do:&lt;/b&gt; &lt;/p&gt; &lt;p&gt;As a Senior, Software Engineer for Walmart Global Tech, you ll have the opportunity to&lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;Design and Develop complex, scalable and fault tolerant software systems using Java, Springboot, Python.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Work with the Product team to understand Customer requirements and work with them closely.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Write Unit and regression tests for any code that is developed.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Do production deployments using platforms automated CI and deployment tools.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Be an advocate for keeping Cloud Infra cost low and always strive for better performance on the products being built.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; 
&lt;p&gt;Partner with senior and junior teammates throughout the software development lifecycle to cultivate a learning and team building.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Be a core scrum team member and follow agile best practices&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt; &lt;b&gt;What you will bring:&lt;/b&gt; &lt;/p&gt; &lt;ul&gt; &lt;li&gt; &lt;p&gt;B.Tech. / B.E. / M.Tech. / M.S. in Computer Science or relevant discipline&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;6+ years of experience in development of highly-scalable distributed applications and platforms - particularly for multi-tenant, SaaS products.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong computer science fundamentals: data structures, algorithms, design patterns.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Hands on experience in Microservices, RESTful webservices development in Java SpringBoot or equivalent framework.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Knowledge of API standards and best practices&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Proficient in working with relational databases (e.g., MySQL, PostgreSQL, Oracle) and NOSQL Databases like Cosmos DB, Cassandra&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Good knowledge in messaging systems: Kafka / RabbitMQ&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Knowledge of at least one of Azure/Google Cloud Platforms&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Good to have an understanding of Gen AI Frameworks&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Strong ability to adapt to change quickly. 
Proficient in new and emerging technologies.&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Good communication and interpersonal skills&lt;/p&gt; &lt;/li&gt; &lt;li&gt; &lt;p&gt;Practitioner of Agile (Scrum) methodology.&lt;/p&gt; &lt;/li&gt; &lt;/ul&gt; &lt;p&gt;Technical Skills:&lt;br&gt;Core Skills : JAVA,J2EE, SpringBoot, Python&lt;br&gt;DataBase: SQL Database&lt;br&gt;Messaging and communication: Kafka/RESTful - Basic debugging knowledge&lt;br&gt;App monitoring tools: Splunk, Dynatrace/Appdynamics - Basic debugging knowledge&lt;br&gt;Cloud Technologies: MS Azure, GCP&lt;/p&gt; &lt;p&gt; &lt;b&gt;About Walmart Global Tech&lt;/b&gt; &lt;br&gt;Imagine working in an environment where one line of code can make life easier for hundreds of millions of people. That s what we do at Walmart Global Tech. We re a team of software engineers, data scientists, cybersecurity experts and service professionals within the world s leading retailer who make an epic impact and are at the forefront of the next retail disruption. People are why we innovate, and people power our innovations. We are people-led and tech-empowered.&lt;/p&gt; &lt;p&gt;We train our team in the skillsets of the future and bring in experts like you to help us grow. We have roles for those chasing their first opportunity as well as those looking for the opportunity that will define their career. Here, you can kickstart a great career in tech, gain new skills and experience for virtually every industry, or leverage your expertise to innovate at scale, impact millions and reimagine the future of retail.&lt;br&gt;Walmart s culture sets us apart, and we know being together helps us innovate, learn and grow great careers. This role is based in our [Bangalore/Chennai] office for daily work, with the flexibility for associates to manage their personal lives.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Benefits&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Beyond our great compensation package, you can receive incentive awards for your performance. 
Other great perks include a host of best-in-class benefits maternity and parental leave, PTO, health benefits, and much more.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Belonging&lt;/b&gt; &lt;/p&gt; &lt;p&gt;We aim to create a culture where every associate feels valued for who they are, rooted in respect for the individual. Our goal is to foster a sense of belonging, to create opportunities for all our associates, customers and suppliers, and to be a Walmart for everyone.&lt;/p&gt; &lt;p&gt;At Walmart, our vision is &quot;everyone included.&quot; By fostering a workplace culture where everyone is and feels included, everyone wins. Our associates and customers reflect the makeup of all 19 countries where we operate. By making Walmart a welcoming place where all people feel like they belong, we re able to engage associates, strengthen our business, improve our ability to serve customers, and support the communities where we operate.&lt;/p&gt; &lt;p&gt; &lt;b&gt;Equal Opportunity Employer&lt;/b&gt; &lt;/p&gt; &lt;p&gt;Walmart, Inc., is an Equal Opportunities Employer By Choice. We believe we are best equipped to help our associates, customers and the communities we serve live better when we really know them. 
That means understanding, respecting and valuing unique styles, experiences, identities, ideas and opinions while being inclusive of all people.&lt;/p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Minimum Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;span&gt; &lt;i&gt;Outlined 
below are the required minimum qualifications for this position. If none are listed, there are no minimum qualifications. &lt;/i&gt; &lt;/span&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Option 1: Bachelors degree in computer science, computer engineering, computer information systems, software engineering, or related area and 3 years experience in software engineering or related area.&lt;br&gt;Option 2: 5 years experience in software engineering or related area. 
&lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Preferred Qualifications...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;p&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;i&gt; &lt;i&gt;Outlined below are the optional preferred qualifications for this position. If none are listed, there are no preferred qualifications. 
&lt;/i&gt; &lt;/i&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/p&gt; Master s degree in computer science, information technology, engineering, information systems, cybersecurity, or related area and 1 year s experience leading information security or cybersecurity projects, We value candidates with a background in creating inclusive digital experiences, demonstrating knowledge in implementing Web Content Accessibility Guidelines (WCAG) 2.2 AA standards, assistive technologies, and integrating digital accessibility seamlessly. The ideal candidate would have knowledge of accessibility best practices and join us as we continue to create accessible products and services following Walmart s accessibility standards and guidelines for supporting an inclusive culture. 
Information Technology - CISCO Certification - Certification &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;span&gt; &lt;b&gt; &lt;span&gt;Primary Location...&lt;/span&gt; &lt;/b&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; &lt;/span&gt; BLOCK- 1, PRESTIGE TECH PACIFIC PARK, SY NO. 
38/1, OUTER RING ROAD KADUBEESANAHALLI, India&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Walmart&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807947/senior-software-engineer-at-walmart/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807947/senior-software-engineer-at-walmart/</link>
  <title>[Full Time] Senior, Software Engineer at Walmart</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808815/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt; &lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt; Project Role Description :&lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Informatica MDM&lt;br&gt; &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:As a Data Platform Engineer, you will assist with the data platform blueprint and design, encompassing the relevant data platform components. Your typical day will involve collaborating with Integration Architects and Data Architects to ensure cohesive integration between systems and data models, while also engaging in discussions to refine and enhance the overall data architecture strategy. &lt;div&gt; &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;/div&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt; &lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt; &lt;li&gt;Responsible for team decisions.&lt;/li&gt; &lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt; &lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt; &lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt; &lt;li&gt;Monitor project progress and ensure alignment with strategic goals. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Informatica MDM.&lt;/li&gt; &lt;li&gt;Strong understanding of data integration techniques and best practices.&lt;/li&gt; &lt;li&gt;Experience with data modeling and database design.&lt;/li&gt; &lt;li&gt;Familiarity with ETL processes and data warehousing concepts.&lt;/li&gt; &lt;li&gt;Ability to troubleshoot and resolve data-related issues efficiently. &lt;br&gt;Additional Information:&lt;/li&gt; &lt;li&gt;The candidate should have minimum 7.5 years of experience in Informatica MDM.&lt;/li&gt; &lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt; &lt;li&gt;A 15 years full time education is required.&lt;br&gt; Qualification15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808815/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808815/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808816/test-lead-data-at-hexaware-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt;&lt;li&gt;Should have strong experience in ETL testing with 6 to 9 years &lt;/li&gt;&lt;li&gt;Should be strong in SQL query writing&lt;/li&gt;&lt;li&gt;Experience with data warehousing and ETL processes&lt;/li&gt;&lt;li&gt;Proficiency in Hadoop, Spark, and Hive&lt;/li&gt;&lt;li&gt;Knowledge of SQL and NoSQL databases&lt;/li&gt;&lt;li&gt;Experience with data warehousing and ETL processes&lt;/li&gt;&lt;li&gt;This role involves working closely with data engineers, data analysts, and other stakeholders to ensure the accuracy, efficiency, and reliability of big data systems and applications&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Hexaware Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Mumbai&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808816/test-lead-data-at-hexaware-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808816/test-lead-data-at-hexaware-technologies/</link>
  <title>[Full Time] Test Lead - Data at Hexaware Technologies</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809039/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;b&gt; About The Role &lt;/b&gt; &lt;br&gt; &lt;b&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt; &lt;b&gt; Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Microsoft Azure Data Services &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;7.5&lt;/b&gt; year(s) of experience is required &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:As a Data Architect, you will define the data requirements and structure for the application. Your typical day will involve modeling and designing the application data structure, storage, and integration, ensuring that the data architecture aligns with business objectives and supports efficient data management practices. You will collaborate with various stakeholders to gather requirements and translate them into effective data solutions, while also overseeing the implementation of data strategies that enhance data accessibility and usability across the organization. Roles &amp;amp; Responsibilities:- Expected to be an SME.- Collaborate and manage the team to perform.- Responsible for team decisions.- Engage with multiple teams and contribute on key decisions.- Provide solutions to problems for their immediate team and across multiple teams.- Facilitate knowledge sharing sessions to enhance team capabilities.- Monitor and evaluate team performance to ensure alignment with project goals. 
Professional &amp;amp; Technical &lt;b&gt;Skills:&lt;/b&gt; Must To Have &lt;b&gt;Skills:&lt;/b&gt; &lt;li&gt;Proficiency in Microsoft Azure Data Services.- Strong understanding of data modeling techniques and best practices.- Experience with data integration tools and ETL processes.- Familiarity with cloud-based data storage solutions and architectures.- Ability to design and implement data governance frameworks. Additional Information:- The candidate should have minimum 7.5 years of experience in Microsoft Azure Data Services.- This position is based at our Hyderabad office.- A 15 years full time education is required.&lt;b&gt; Qualification&lt;/b&gt; 15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809039/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809039/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808817/custom-software-engineer-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt; &lt;br&gt;Project Role :&lt;/b&gt;Custom Software Engineer&lt;br&gt; &lt;b&gt; Project Role Description :&lt;/b&gt;Develop custom software solutions to design, code, and enhance components across systems or applications. Use modern frameworks and agile practices to deliver scalable, high-performing solutions tailored to specific business needs. &lt;br&gt; &lt;b&gt;Must have skills :&lt;/b&gt;Informatica MDM&lt;br&gt; &lt;b&gt; &lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt; &lt;b&gt; &lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time education &lt;b&gt;Summary&lt;/b&gt;:As a Data Platform Engineer, you will assist with the data platform blueprint and design, encompassing the relevant data platform components. Your typical day will involve collaborating with Integration Architects and Data Architects to ensure cohesive integration between systems and data models, while also engaging in discussions to refine and enhance the overall data architecture strategy. &lt;div&gt; &lt;b&gt;Roles &amp;amp; Responsibilities:&lt;/b&gt; &lt;/div&gt; &lt;li&gt;Expected to be an SME.&lt;/li&gt; &lt;li&gt;Collaborate and manage the team to perform.&lt;/li&gt; &lt;li&gt;Responsible for team decisions.&lt;/li&gt; &lt;li&gt;Engage with multiple teams and contribute on key decisions.&lt;/li&gt; &lt;li&gt;Provide solutions to problems for their immediate team and across multiple teams.&lt;/li&gt; &lt;li&gt;Facilitate knowledge sharing sessions to enhance team capabilities.&lt;/li&gt; &lt;li&gt;Monitor project progress and ensure alignment with strategic goals. 
&lt;b&gt;Professional &amp;amp; Technical Skills:&lt;/b&gt; &lt;br&gt;Must To Have Skills:&lt;br&gt;Proficiency in Informatica MDM.&lt;/li&gt; &lt;li&gt;Strong understanding of data integration techniques and best practices.&lt;/li&gt; &lt;li&gt;Experience with data modeling and database design.&lt;/li&gt; &lt;li&gt;Familiarity with ETL processes and data warehousing concepts.&lt;/li&gt; &lt;li&gt;Ability to troubleshoot and resolve data-related issues efficiently. &lt;br&gt;Additional Information:&lt;/li&gt; &lt;li&gt;The candidate should have minimum 7.5 years of experience in Informatica MDM.&lt;/li&gt; &lt;li&gt;This position is based at our Bengaluru office.&lt;/li&gt; &lt;li&gt;A 15 years full time education is required.&lt;br&gt; Qualification15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Kolkata&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808817/custom-software-engineer-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808817/custom-software-engineer-at-accenture/</link>
  <title>[Full Time] Custom Software Engineer at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809045/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;/b&gt;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft SQL Server Integration Services (SSIS)&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;2&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time educationKey ResponsibilitiesAssist in planning:Run assessments, document findings, prepare checklists, and collect environment details.Develop migration scripts:Write T-SQL and PowerShell automation under senior guidance, and document usage.Execute backups &amp;amp; restores:Perform full/differential backups, test restores and validate schema/data integrity.Perform migration testing:Run regression tests, validate row counts/checksums, and log results.Support cutover tasks:Execute assigned migration steps (backups, restores).Monitor &amp;amp; report:Track migration progress, log errors, escalate issues, and monitor system health during hypercare.Documentation:Maintain runbooks, migration steps, and post-migration validation reports.Required SkillsBasic SQL Server administration (backups, restores, jobs, security roles).Hands on experience with Microsoft SQL server (version 2016 and above) and SQL Server Management Studio (SSMS)SQL development/T-SQL scripting for routine tasks.Familiarity with DMA &amp;amp; SSMS upgrade tools.Testing &amp;amp; validation skills.Documentation &amp;amp; communication. 
&lt;br&gt;Additional Information:&lt;li&gt;The candidate should have minimum 2 years of experience in Microsoft SQL Server Integration Services (SSIS).&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;br&gt;&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809045/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809045/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/809041/data-architect-at-accenture/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;b&gt;&lt;/b&gt;&lt;br&gt;&lt;b&gt;&lt;br&gt;Project Role :&lt;/b&gt;Data Architect&lt;br&gt;&lt;b&gt;Project Role Description :&lt;/b&gt;Define the data requirements and structure for the application. Model and design the application data structure, storage and integration. &lt;br&gt;&lt;b&gt;Must have skills :&lt;/b&gt;Microsoft SQL Server Integration Services (SSIS)&lt;br&gt;&lt;b&gt;&lt;br&gt;Good to have skills :&lt;/b&gt;NA&lt;br&gt;Minimum &lt;b&gt;5&lt;/b&gt; year(s) of experience is required&lt;br&gt;&lt;b&gt;&lt;br&gt;Educational Qualification :&lt;/b&gt;15 years full time educationKey ResponsibilitiesDefine migration strategy &amp;amp; roadmap:Establish timelines, downtime windows, rollback plans, and communication with stakeholders.Assess compatibility &amp;amp; risks:Use migration assessment tool (SQL Server Management Studio 22) to identify deprecated features and breaking changes.Infrastructure readiness:Coordinate with client/system admins to provision SQL Server 2025 servers (hardware/VMs, storage, networking).Design &amp;amp; validate migration scripts:Create and review T-SQL and PowerShell scripts for schema migration, data movement.Oversee testing &amp;amp; validation:Define unit test scenarios, ensure data integrity, and validate application connectivity.Co-ordinate in deployment/cutover activitiesRequired SkillsSQL Server administration (installation, configuration, migration methods).Hands on experience with Microsoft SQL server (version 2016 and above) and SQL Server Management Studio (SSMS)Strong SQL development knowledge, T-SQL &amp;amp; scripting for automation. PowerShell Scripting is added advantage.Basic Infrastructure knowledge (Windows Server, storage, networking).Project management &amp;amp; leadership.Troubleshooting &amp;amp; root cause analysis. 
&lt;br&gt;Additional Information:&lt;li&gt;The candidate should have minimum 5 years of experience in Microsoft SQL Server Integration Services (SSIS).&lt;/li&gt;&lt;li&gt;This position is based at our Hyderabad office.&lt;/li&gt;&lt;li&gt;A 15 years full time education is required.&lt;br&gt;&lt;b&gt;Qualification&lt;/b&gt;15 years full time education&lt;/li&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Accenture&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/809041/data-architect-at-accenture/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/809041/data-architect-at-accenture/</link>
  <title>[Full Time] Data Architect at Accenture</title>
  <dc:date>Mon, 19 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807372/sr-engineer-software-at-empower/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Our vision for the future is based on the idea that transforming financial lives starts by giving our people the freedom to transform their own. We have a flexible work environment, and fluid career paths. We not only encourage but celebrate internal mobility. We also recognize the importance of purpose, well-being, and work-life balance. Within Empower and our communities, we work hard to create a welcoming and inclusive environment, and our associates dedicate thousands of hours to volunteering for causes that matter most to them.&lt;/p&gt;&lt;p&gt;Chart your own path and grow your career while helping more customers achieve financial freedom. Empower Yourself.&lt;/p&gt;&lt;p&gt;6-8 years of hands-on technology experience with the ability to work independently and has an understanding of how a system is designed technically&lt;/p&gt;&lt;p&gt;A minimum of 6 - 8 years of hands-on experience on Pro*C,PL/SQL,Oracle SQL,UNIX (Solaris) and Java programming language.&lt;/p&gt;&lt;p&gt;Agile/ Scrum methodology&lt;/p&gt;&lt;p&gt;Shell Programming (C-shell, KORN Shell) and PERL&lt;/p&gt;&lt;p&gt;Degree in Computer Science or Information Systems, or equivalent applicable work experience&lt;br&gt;Enthusiasm to work and learn in a team environment.&lt;br&gt;Ability to work on a task independently with minimal supervision.&lt;br&gt;Experience in designing a system by working with other technical architects&lt;br&gt;Excellent written and verbal communication skills&lt;br&gt;Experience with JavaScript and either Angular or React JS framework is a plus&lt;br&gt;Experience with Oracle Forms is a plus.&lt;/p&gt; This job description is not intended to be an exhaustive list of all duties, responsibilities and qualifications of the job.&amp;nbsp; The employer has the right to revise this job description at any time.&amp;nbsp;&amp;nbsp; You will be evaluated in part based on your performance of the responsibilities and/or tasks 
listed in this job description.&amp;nbsp;&amp;nbsp; You may be required to perform other duties that are not included on this job description. The job description is not a contract for employment, and either you or the employer may terminate employment at any time, for any reason, as per terms and conditions of your employment contract.&lt;p&gt;&lt;span&gt;We are an equal opportunity employer with a commitment to diversity. &amp;nbsp;All individuals, regardless of personal characteristics, are encouraged to apply. &amp;nbsp;All qualified applicants will receive consideration for employment without regard to age, race, color, national origin, ancestry, sex, sexual orientation, gender, gender identity, gender expression, marital status, pregnancy, religion, physical or mental disability, military or veteran status, genetic information, or any other status protected by applicable state or local law.&amp;nbsp;&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Financial Services&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Empower&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807372/sr-engineer-software-at-empower/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807372/sr-engineer-software-at-empower/</link>
  <title>[Full Time] Sr Engineer Software at Empower</title>
  <dc:date>Sun, 18 Jan 2026 10:12:57 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/808949/data-architect-at-virtusa/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; 12-15 Years of Exp as Data ArchitectWork with business stakeholders, analysts, and technology teams to understand data requirements, usage patterns, and strategic objectives. &lt;/li&gt; &lt;li&gt; Assess existing data landscapes to understand current-state data flows, structures, and dependencies. &lt;/li&gt; &lt;li&gt; Define end-to-end data architecture covering data ingestion, storage, processing, and consumption layers. &lt;/li&gt; &lt;li&gt; Design target-state data models and data flow architectures to support analytics, reporting, and operational needs. &lt;/li&gt; &lt;li&gt; Create and maintain data architecture diagrams, data models, and design documentation. &lt;/li&gt; &lt;li&gt; Ensure data solutions align with enterprise architecture standards and long-term data strategy. &lt;/li&gt; &lt;li&gt; Collaborate with engineering teams to clarify design intent and support implementation activities. &lt;/li&gt; &lt;li&gt; Review data solution designs to ensure scalability, performance, reliability, and consistency. &lt;/li&gt; &lt;li&gt; Identify data-related risks such as data quality, lineage gaps, or integration issues, and propose mitigation approaches. &lt;/li&gt; &lt;li&gt; Support data governance initiatives by aligning architecture with data ownership, stewardship, and usage policies. &lt;/li&gt; &lt;li&gt; Participate in design reviews, planning sessions, and cross-team coordination meetings. &lt;/li&gt; &lt;li&gt; Support modernization or transformation initiatives, including data platform upgrades or consolidation efforts. &lt;/li&gt; &lt;li&gt; Provide guidance during testing, deployment, and post-implementation reviews to ensure architectural compliance. 
&lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Banking&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Architect / Consultant&lt;/br&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Virtusa&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/808949/data-architect-at-virtusa/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/808949/data-architect-at-virtusa/</link>
  <title>[Full Time] Data Architect at Virtusa</title>
  <dc:date>Fri, 16 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807477/senior-software-engineer-c-developer-at-nice/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;So, what's the role all about?&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;As a &lt;strong&gt;Sr. Cloud Services Automation Engineer, &lt;/strong&gt;&lt;strong&gt;you will be &lt;/strong&gt;responsible for designing, developing, and maintaining robust end-to-end automation solutions that support our customer onboarding processes from an on-prem software solution to Azure SAAS platform and streamline cloud operations. You will work closely with Professional Services, Cloud Operations, and Engineering teams to implement tools and frameworks that ensure seamless deployment, monitoring, and self-healing of applications running in Azure.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;How will you make an impact? &lt;/strong&gt;&amp;nbsp;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;strong&gt;4-6 years Experience&lt;/strong&gt;&lt;/span&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;&lt;strong&gt;Design and develop automated workflows&lt;/strong&gt;that orchestrate complex processes across multiple systems, databases, endpoints, and storage solutions in on-prem and public cloud.&lt;/li&gt; &lt;li&gt;Design, develop, and maintain &lt;strong&gt;internal tools/utilities using C#, PowerShell, Python, Bash&lt;/strong&gt; to automate and optimize cloud onboarding workflows.&lt;/li&gt; &lt;li&gt;Create integrations with &lt;strong&gt;REST APIs&lt;/strong&gt; and other services to ingest and process external/internal data. 
&lt;strong&gt;Debug and troubleshoot &lt;/strong&gt;existing APIs for errors.&amp;nbsp;Perform regular &lt;strong&gt;API testing&lt;/strong&gt; to ensure functionality and performance.&amp;nbsp;&lt;/li&gt; &lt;li&gt;&lt;strong&gt;Query and analyze data&lt;/strong&gt; from various sources such as, &lt;strong&gt;SQL&lt;/strong&gt; databases, &lt;strong&gt;Elastic Search indices&lt;/strong&gt; and &lt;strong&gt;Log files&lt;/strong&gt; (structured and unstructured)&lt;/li&gt; &lt;li&gt;Develop utilities to &lt;strong&gt;visualize, summarize, &lt;/strong&gt;or otherwise&lt;strong&gt; &lt;/strong&gt;make data&lt;strong&gt; actionable&lt;/strong&gt; for Professional Services and QA engineers.&lt;/li&gt; &lt;li&gt;Work closely with &lt;strong&gt;test, ingestion, and configuration teams&lt;/strong&gt; to understand bottlenecks and build &lt;strong&gt;self-healing mechanisms&lt;/strong&gt; for high availability and performance.&lt;/li&gt; &lt;li&gt;&lt;strong&gt;Build automated data pipelines with data consistency and reconciliation checks &lt;/strong&gt;using tools like PowerBI/Grafana&amp;nbsp;for collecting metrics from multiple endpoints and generating centralized and actionable dashboards.&lt;/li&gt; &lt;li&gt;&lt;strong&gt;Automate resource provisioning&lt;/strong&gt;across Azure services including AKS, Web Apps, and storage solutions&lt;/li&gt; &lt;li&gt;Experience in building &lt;strong&gt;Infrastructure-as-code (IaC) solutions&lt;/strong&gt; using tools like Terraform, Bicep, or ARM templates&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Have you got what it takes? 
&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Bachelors degree in computer science, Engineering, or related field (or equivalent experience).&lt;/li&gt; &lt;li&gt;Proficiency in scripting and programming languages (e.g., C#, .NET, PowerShell, Python, Bash).&lt;/li&gt; &lt;li&gt;Experience working with and integrating REST APIs&lt;/li&gt; &lt;li&gt;Experience with IaC and configuration management tools (e.g., Terraform, Ansible)&lt;/li&gt; &lt;li&gt;Familiarity with monitoring and logging solutions (e.g., Azure Monitor, Log Analytics, Prometheus, Grafana).&lt;/li&gt; &lt;li&gt;Familiarity with modern version control systems (e.g., GitHub).&lt;/li&gt; &lt;li&gt;Excellent problem-solving skills and attention to detail.&lt;/li&gt; &lt;li&gt;Ability to work with development and operations teams, to achieve desired results, on common projects&lt;/li&gt; &lt;li&gt;Strategic thinker and capable of learning new technologies quickly&lt;/li&gt; &lt;li&gt;Good communication with peers, subordinates and managers&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;You will have an advantage if you also have:&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt; &lt;li&gt;Experience with AKS infrastructure administration.&lt;/li&gt; &lt;li&gt;Experience orchestrating automation with Azure Automation tools like Logic Apps.&lt;/li&gt; &lt;li&gt;Experience working in a secure, compliance driven environment (e.g. CJIS/PCI/SOX/ISO)&lt;/li&gt; &lt;li&gt;Certifications in vendor or industry specific technologies.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Whats in it for you?&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;Join an ever-growing, market disrupting, global company where the teams  comprised of the best of the best  work in a fast-paced, collaborative, and creative environment! As the market leader, every day at NICE is a chance to learn and grow, and there are endless internal career opportunities across multiple roles, disciplines, domains, and locations. 
If you are passionate, innovative, and excited to constantly raise the bar, you may just be our next NICEr!&lt;/p&gt;&lt;p&gt;&lt;strong&gt;&amp;nbsp;Enjoy NICE-FLEX! &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;At NICE, we work according to the NICE-FLEX hybrid model, which enables maximum flexibility: 2 days working from the office and 3 days of remote work, each week. Naturally, office days focus on face-to-face meetings, where teamwork and collaborative thinking generate innovation, new ideas, and a vibrant, interactive atmosphere.&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Requisition ID: 9495&lt;/strong&gt;&lt;strong&gt;&lt;br&gt;Reporting into: &lt;/strong&gt;Director of Cloud Services&lt;br&gt;&lt;strong&gt;Role Type: &lt;/strong&gt;Individual Contributor&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;NICE&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807477/senior-software-engineer-c-developer-at-nice/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807477/senior-software-engineer-c-developer-at-nice/</link>
  <title>[Full Time] Senior Software Engineer (C# Developer) at NICE</title>
  <dc:date>Thu, 15 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/805682/technical-specialist-app-engg-services-at-birlasoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; Area(s) of responsibility &lt;/div&gt; &lt;div&gt; &lt;p&gt; &lt;strong&gt;Sr. Data Migration - Windchill&lt;/strong&gt; &lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;br&gt;&lt;li&gt;Should be expert in Windchill Migration using Windchill Bulk Migrator (WBM) - at least have executed 5-6 Windchill migration project using WBM.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Should be expert in WBM tool execution (Extraction, Transformation &amp;amp; Loading)&lt;/li&gt;&lt;br&gt;&lt;li&gt;Experience in data migration including CAD Data migration.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Experience in at least one non-Windchill to Windchill data migration.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Should have good understanding of Windchill Architecture, database etc.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Should have good understanding of Windchill object models, relationships, content.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Should have experience on working with Customer for Migration Requirements Gathering, Source Data Analysis and Data Mapping&lt;/li&gt;&lt;br&gt;&lt;li&gt;Scripting Knowledge on Database - Oracle/SQL Server with large data set analysis&lt;/li&gt;&lt;br&gt;&lt;li&gt;Review existing source systems and datasets.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Data Mapping exercise with SME s&lt;/li&gt;&lt;br&gt;&lt;li&gt;Support Extraction of Data from Enovia, SAP, UDI Portal, and E-Manual Website&lt;/li&gt;&lt;br&gt;&lt;li&gt;Transform, load, and Validate the Enovia, SAP, UDI Portal s, E Manual Data and other source systems identified in scope.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Support Validation testing.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Issue Resolution and Update Scripts for migration.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Cut over planning.&lt;/li&gt;&lt;br&gt;&lt;li&gt;Strong problem-solving abilities&lt;/li&gt;&lt;br&gt;&lt;li&gt;Strong communication skills&lt;/li&gt;&lt;br&gt;&lt;/ul&gt;&lt;br&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job 
Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Birlasoft&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/805682/technical-specialist-app-engg-services-at-birlasoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/805682/technical-specialist-app-engg-services-at-birlasoft/</link>
  <title>[Full Time] Technical Specialist-App Engg Services at Birlasoft</title>
  <dc:date>Wed, 14 Jan 2026 13:26:33 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806778/servicenow-sam-ham-itam-developer-with-cmdb-skills-at-kyndryl/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;b&gt;&lt;span&gt;Who We Are&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;At Kyndryl, we run and reimagine the mission-critical technology systems that drive advantage for the worlds leading businesses.&amp;nbsp; We are at the heart of progress; with proven expertise and a continuous flow of AI-powered insight, enabling smarter decisions, faster innovation, and a lasting competitive edge. For our peopleKyndrylsthat means doing purposeful work that powers human progress. Join us and experience a flexible, supportive environment where your well-being is prioritized and your potential can thrive.&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;b&gt;&lt;span&gt;The Role&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;Are you passionate about solving complex problems? Do you thrive in a fast-paced environment? Then theres a good chance you will love being a part of our Software Engineering  Development team at Kyndryl, where you will be able to see the immediate value of your work.&amp;nbsp;As a Software Engineering - Developer at Kyndryl, you will be at the forefront of designing, developing, and implementing cutting-edge software solutions. 
Your work will play a critical role in our business offering, your code will deliver value to our customers faster than ever before, and your attention to detail and commitment to quality will be critical in ensuring the success of our products.&lt;/p&gt;&lt;p&gt;Update and extend the Data Model (table changes, form changes and permissions) based on approved requests&lt;/p&gt;&lt;p&gt; Design and develop data load process for data loading&amp;nbsp;&lt;/p&gt;&lt;p&gt; Provide support for CMDB issues&lt;/p&gt;&lt;p&gt; Implement ServiceNow development stories and RFSs&lt;/p&gt;&lt;p&gt; Customize or support CMDB Integrations&amp;nbsp;&lt;/p&gt;&lt;p&gt; Provide support for complex problems related to the CMDB solution that are raised by the operation teams&lt;/p&gt;&lt;p&gt; Configure, deploy, and customize standard reports and dashboards, including ServiceNow CMDB Audit Dashboard&lt;/p&gt;&lt;p&gt; Create and update CMDB catalog items, forms and views, transform maps, IRE, Script Includes and Business rules&lt;/p&gt;&lt;p&gt;&amp;nbsp; For a solution deployed with ServiceNow Discovery&lt;/p&gt;&lt;p&gt; Configure ServiceNow MID-server for Discovery deployment&lt;/p&gt;&lt;p&gt; Enhance custom patterns (probes and sensors) for requested and approved changes&lt;/p&gt;&lt;p&gt; Create new patterns for new types of devices or requirements&lt;/p&gt;&lt;p&gt; Provide support for discovery/Service Mapping issues raised by the Configuration Manager or Configuration &amp;nbsp; &amp;nbsp; Librarian including network access and permissions/credentials&lt;/p&gt;&lt;p&gt; Work with support teams to resolve network access and permissions/credentials issues has context menu&lt;br&gt;&amp;nbsp;&lt;/p&gt;&lt;p&gt;Software development methodologies, with demonstrated experience developing scalable and robust software&lt;br&gt;Experienced in relational and NoSQL databases, data mapping, XML/JSON, Rest based web services&lt;br&gt;Knowledge of architecture design - Microservices 
architecture, containers (Docker &amp;amp; k8s), messaging queues&lt;br&gt;Deep understanding of OOP and Design patterns&lt;/p&gt;&lt;p&gt;As a valued member of our team, you will provide work estimates for assigned development work, and guide features, functional objectives, or technologies being built for interested parties. Your contributions will have a significant impact on our products&apos; success, and you will be part of a team that is passionate about innovation, creativity, and excellence. Above all else, you will have the freedom to drive innovation and take ownership of your work while honing your problem-solving, collaboration, and automation skills. Together, we can make a difference in the world of cloud-based managed services.Your Future at Kyndryl&lt;br&gt;The career path ahead is full of exciting opportunities to grow and advance within the job family. With dedication and hard work, you can climb the ladder to higher bands, achieving coveted positions such as Principal Engineer or Vice President of Software. These roles not only offer the chance to inspire and innovate, but also bring with them a sense of pride and accomplishment for having reached the pinnacle of your career in the software industry.&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;b&gt;&lt;span&gt;Who You Are&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;Youre good at what you do and possess the required experience to prove it. However, equally as important  you have a growth mindset; keen to drive your own personal and professional development. You are customer-focused  someone who prioritizes customer success in their work. And finally, youre open and borderless  naturally inclusive in how you work with others.Required Technical and Professional Experience&lt;br&gt;CMDB L3 HAM/SAM Developer---&amp;gt;&lt;br&gt; Min 7 years of experience working as a software engineer on complex software projects&lt;br&gt;Excellent coding skills and solid development experience (Java, Python, .Net etc.) 
with debugging and problem-solving skills&lt;/p&gt;&lt;p&gt;An SME/Developer will perform the following activities under their scope of responsibility.&lt;/p&gt;&lt;p&gt; Periodic updates to entry points.&lt;br&gt; Resolve discovery errors and credential errors.&amp;nbsp;&lt;br&gt; Maintain Service Mapping schedules.&lt;br&gt; Periodic updates to entry points.&lt;br&gt; Resolve discovery errors and credential errors.&lt;br&gt; Maintain Service Mapping schedules.&lt;br&gt; Update and extend the Data Model (table changes, form changes and permissions) based on approved requests&lt;br&gt; Design and develop data load process for data loading&lt;br&gt; Provide support for CMDB issues&lt;br&gt; Implement ServiceNow development stories and RFSs&lt;br&gt; Customize or support CMDB Integrations provide support for complex problems related to the CMDB solution that are &amp;nbsp; &amp;nbsp; &amp;nbsp; raised by the operation teams configure, deploy, and customize standard reports and dashboards, including ServiceNow CMDB Audit Dashboard Create and update CMDB catalog items, forms and views, transform maps, IRE, Script Includes and Business rules.&lt;/p&gt;&lt;p&gt;&lt;br&gt;For a solution deployed with ServiceNow Discovery, the SME/Developer has additional responsibilities pertaining to aspects of the Discovery processes and patterns including the following specific responsibilities&lt;/p&gt;&lt;p&gt;Configure ServiceNow MID-server for Discovery deployment&lt;br&gt;Enhance custom patterns (probes and sensors) for requested and approved changes&lt;br&gt;Create new patterns for new types of devices or requirements&lt;br&gt;Provide support for discovery/Service Mapping issues raised by the Configuration Manager or Configuration Librarian including network access and permissions/credentials&lt;br&gt;Work with support teams to resolve network access and permissions/credentials issues&lt;br&gt;Preferred Technical and Professional ExperienceBachelor&apos;s degree in Computer 
Science, related technical field, or equivalent practical experience&lt;br&gt;Certification in one or more of the hyperscalers (Azure, AWS, and Google GCP) - otherwise, you can obtain certifications with Kyndryl&lt;br&gt;Experience with DevOps tools and modern engineering practices&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;b&gt;&lt;span&gt;Being You&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;&lt;p&gt;The Kyn in Kyndryl means kinship, which represents the strong bonds we have with each other, our customers and our communities. We focus on ensuring all Kyndryls feel included and we welcome people of all cultures, backgrounds, and experiences. Even if you dont meet every requirement, we encourage you to apply. We believe in growth, and were excited to see what you can bring. At Kyndryl, employee feedback has told us that our number one driver of employee engagement is belonging. That sense of belonging  being a valued, respected, trusted member of the team  is fundamental to our culture and fueling great experiences for our customers. This dedication to welcoming everyone into our company means that Kyndryl gives you the ability to thrive and contribute to our culture of empathy and shared success. Thats The Kyndryl Way.&lt;/p&gt;&lt;p&gt;&lt;br&gt;&lt;span&gt;&lt;b&gt;What You Can Expect&lt;/b&gt;&lt;/span&gt;&lt;/p&gt;&lt;p&gt;Your career with us isnt just a jobits an adventure with purpose.&amp;nbsp; &lt;span&gt;We offer a dynamic, hybrid-friendly culture that supports your well-being and empowers you to grow. Our Be Well programs are thoughtfully designed to support your financial, mental, physical, and social healthbecause we know that when you feel your best, you do your best. From your very first day, youll dive into impactful work that powers the systems our customers rely on every day. You wont just contributeyoull make a difference, tackling meaningful projects that sharpen your skills and fuel your growth.Were here to champion your journey. 
With powerful tools to chart your career path, personalized development goals aligned with your ambitions, and continuous feedback to keep you inspired and on track, youll have everything you need to thrive and evolve. Youll develop in-demand skills to grow your career and achieve your ambitions with access to cutting-edge learning opportunitiesfrom certifications with Microsoft, Google, and Amazon to coaching and hands-on experiences. And through it all, youll be part of a culture that values empathy, restless learning, and a devotion to shared success. We want you to thrive hereand were committed to helping you do just that. Ready to make an impact? Join us and help shape whats next.&lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;&lt;b&gt;Get Referred!&lt;/b&gt; &lt;/span&gt;&lt;/p&gt;&lt;p&gt;&lt;span&gt;If you know someone that works at Kyndryl, when asked How Did You Hear About Us during the application process, select Employee Referral and enter your contact&apos;s Kyndryl email address.&lt;/span&gt;&lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Kyndryl&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806778/servicenow-sam-ham-itam-developer-with-cmdb-skills-at-kyndryl/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806778/servicenow-sam-ham-itam-developer-with-cmdb-skills-at-kyndryl/</link>
  <title>[Full Time] Servicenow Sam/ham (itam) Developer With Cmdb Skills at Kyndryl</title>
  <dc:date>Wed, 14 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806646/senior-hogan-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description &lt;/strong&gt;&lt;p&gt;The project is focused on ensuring data privacy and compliance in the client environment by implementing and managing data masking solutions using the Delphix platform. This role involves collaboration with cross-functional teams to secure sensitive data while maintaining data integrity for development, testing, and analytics. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Key Responsibilities:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Design and Implementation:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Develop and implement robust data masking solutions using the Delphix platform. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Analyze data sets to identify sensitive information that requires masking. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Create and maintain masking rules, algorithms, and templates for various data environments. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Data Security &amp;amp; Compliance:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Ensure sensitive data complies with regulatory requirements, such as GDPR, HIPAA, PCI-DSS, and CCPA. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Collaborate with security and compliance teams to establish masking policies and standards. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Testing &amp;amp; Validation:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Conduct thorough testing to validate the effectiveness of masking solutions. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Troubleshoot and resolve issues related to data masking processes. 
&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Collaboration &amp;amp; Support:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Work closely with database administrators, developers, and QA teams to integrate masking into workflows. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Provide training and documentation on the Delphix platform and data masking techniques. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Monitoring &amp;amp; Optimization:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Continuously monitor and optimize data masking performance. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Stay updated on the latest trends and updates in data masking technologies. &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;p&gt;&lt;strong&gt;Must have &lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Required Skills and Qualifications:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Bachelor&apos;&apos;s degree in Computer Science, Information Technology, or a related field. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-10+ years of experience in mainframe environments with expertise of 1 year in Hogan applications. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Strong knowledge of COBOL, JCL, VSAM, DB2, and CICS. Knowledge of ---IMS is an added advantage. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Experience with Hogan modules such as IDS, CIS, or Loans. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Familiarity with banking processes and financial services. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Excellent problem-solving, debugging, and analytical skills. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Strong communication skills, with the ability to collaborate across technical and business teams. 
&lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Preferred Qualifications:&lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Experience with mainframe modernization or migration projects. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Knowledge of automation tools for mainframe systems. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Familiarity with Agile and DevOps methodologies in a mainframe environment. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;-Certification in mainframe technologies and Hogan systems. &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806646/senior-hogan-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806646/senior-hogan-developer-at-luxoft/</link>
  <title>[Full Time] Senior Hogan Developer at Luxoft</title>
  <dc:date>Wed, 14 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806560/databricks-developer-at-infobeans/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;strong&gt; Roles and Responsibility &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Design and implement data pipelines using Databricks. &lt;/li&gt; &lt;li&gt; Collaborate with cross-functional teams to identify and prioritize project requirements. &lt;/li&gt; &lt;li&gt; Develop and maintain large-scale data architectures and systems. &lt;/li&gt; &lt;li&gt; Troubleshoot and resolve complex technical issues related to Databricks. &lt;/li&gt; &lt;li&gt; Optimize system performance and ensure scalability and reliability. &lt;/li&gt; &lt;li&gt; Provide technical guidance and support to junior team members. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;strong&gt; Job Requirements &lt;/strong&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong understanding of Databricks concepts and technologies. &lt;/li&gt; &lt;li&gt; Experience working with big data processing and analytics tools. &lt;/li&gt; &lt;li&gt; Excellent problem-solving skills and attention to detail. &lt;/li&gt; &lt;li&gt; Ability to work collaboratively in a fast-paced environment. &lt;/li&gt; &lt;li&gt; Strong communication and interpersonal skills. &lt;/li&gt; &lt;li&gt; Familiarity with agile development methodologies and version control systems. 
&lt;/li&gt; &lt;/ul&gt; &lt;/div&gt; &lt;/div&gt;&lt;br&gt;&lt;b&gt;Location - &lt;/b&gt;Pune,Nagar,Indore,Bengaluru&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Infobeans&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Indore&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806560/databricks-developer-at-infobeans/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806560/databricks-developer-at-infobeans/</link>
  <title>[Full Time] Databricks Developer at Infobeans</title>
  <dc:date>Wed, 14 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/805849/azure-data-architect-subcon-at-birlasoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; Area(s) of responsibility &lt;/div&gt; &lt;div&gt; &lt;p&gt; &lt;strong&gt;Skills: Azure Data Architect&lt;/strong&gt; &lt;/p&gt;&lt;br&gt;&lt;p&gt; &lt;strong&gt;Experience: 12+ years, 4+ Years of experience in Azure Databricks&lt;/strong&gt; &lt;/p&gt;&lt;br&gt;&lt;p&gt; &lt;strong&gt;Location: Mumbai/Pune/ Noida/ Bangalore/ Hyderabad/Chennai&lt;/strong&gt; &lt;/p&gt;&lt;p&gt; Azure Lead with experience in Azure ADF, ADLS Gen2, Databricks, PySpark and Advanced SQL&lt;/p&gt;&lt;br&gt;&lt;p&gt; Responsible for designing and implementing secure, scalable, and highly available cloud-based solutions and estimation on Azure Cloud&lt;/p&gt;&lt;br&gt;&lt;p&gt; 4 Years of experience in Azure Databricks and PySpark&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience in Performance Tuning&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience with integration of different data sources with Data Warehouse and Data Lake is required&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience in creating Data warehouse, data lakes&lt;/p&gt;&lt;br&gt;&lt;p&gt; Understanding of data modelling and data architecture concepts&lt;/p&gt;&lt;br&gt;&lt;p&gt; To be able to clearly articulate pros and cons of various technologies and platforms&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience in supporting tools GitHub, Jira, Teams, Confluence need to be used&lt;/p&gt;&lt;br&gt;&lt;p&gt; Collaborate with clients to understand their business requirements and translate them into technical solutions that leverage AWS and Azure cloud platforms.&lt;/p&gt;&lt;p&gt;Mandatory Skillset: Azure Databricks, PySpark and Advanced SQL&lt;/p&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: 
&lt;/b&gt;Data warehouse Architect / Consultant&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Birlasoft&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/805849/azure-data-architect-subcon-at-birlasoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/805849/azure-data-architect-subcon-at-birlasoft/</link>
  <title>[Full Time] Azure Data Architect-Subcon at Birlasoft</title>
  <dc:date>Wed, 14 Jan 2026 08:56:10 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806085/postgress-subcontractor-at-birlasoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; &lt;div&gt; Responsibilities &lt;br&gt; Manage and maintain the companys PostgreSQL databases, ensuring their performance, availability, and security. &lt;br&gt; Conduct regular database backup and recovery procedures, ensuring data integrity and availability. &lt;br&gt; Monitor systems health and performance, optimizing as necessary for operational efficiency. &lt;br&gt; Collaborate with the development team to improve application and database designs. &lt;br&gt; Required Skills &lt;br&gt; Proficiency in PostgreSQL database administration, including backup and recovery procedures. &lt;br&gt; Strong knowledge of SQL and database management systems. &lt;br&gt; Experience with performance tuning and optimization techniques. &lt;br&gt; The candidate must have a Bachelors degree in Computer Science, Information Technology, or a related field. &lt;br&gt; Preferred Skills &lt;br&gt; Familiarity with other database technologies like MySQL, Oracle, MongoDB. &lt;br&gt; Knowledge of programming languages such as Python, Java, or C++. &lt;br&gt; Experience with cloud services like AWS, Google Cloud, or Azure. &lt;br&gt; Understanding of data warehousing and ETL processes. &lt;br&gt; Knowledge of database security measures. &lt;br&gt; Familiarity with Linux/Unix operating systems. &lt;br&gt; Experience with database design and architectural principles. &lt;br&gt; Knowledge of disaster recovery procedures. &lt;br&gt; Familiarity with version control systems like Git. &lt;br&gt; Experience with data visualization tools and reporting. 
&lt;br&gt; Required Experience &lt;br&gt; 7-10yrs &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Contract&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Birlasoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806085/postgress-subcontractor-at-birlasoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806085/postgress-subcontractor-at-birlasoft/</link>
  <title>[Contract] Postgres - Subcontractor at Birlasoft</title>
  <dc:date>Wed, 14 Jan 2026 07:21:59 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806574/senior-mainframe-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt;Luxoft DXC Technology Company is an established company focusing on consulting and implementation of complex projects in the financial industry. At the interface between technology and business, we convince with our know-how, well-founded methodology and pleasure in success. As a reliable partner to our renowned customers, we support them in planning, designing and implementing the desired innovations. Together with the customer, we deliver top performance!For one of our Client in the Insurance Segment, we are searching for a Senior Mainframe Developer. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;div&gt;&lt;p&gt;&lt;strong&gt;Essential Job Functions: &lt;/strong&gt;&lt;/p&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Design and Development &lt;/li&gt;&lt;li&gt;Mainframe batch support &lt;/li&gt;&lt;li&gt;Resource should be comfortable with On-Call Support and Week-End &lt;/li&gt;&lt;li&gt;Release support. &lt;/li&gt;&lt;li&gt;Incident Outage management &lt;/li&gt;&lt;li&gt;Develop code for Reporting, Defect Enhancement &lt;/li&gt;&lt;li&gt;Hands on experience with JCL, COBOL, CICS, DB2, VSAM and mainframe utilities are mandatory. &lt;/li&gt;&lt;li&gt;Drive the Kanban, Daily Standup, co-ordination meetings with respective stakeholders. 
&lt;/li&gt;&lt;li&gt;Analytical, problem solving, creative thinking and design skills &lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Must have&lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;Strong Mainframe skills with minimum 7+ years of experience &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;Experience in developing design, develop, test, debug, and maintain mainframe applications using COBOL programming language &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;3.Utilize VSAM (Virtual Storage Access Method) for efficient data access and management. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;4.Interact with databases using DB2, including SQL query optimization and performance tuning. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;5.Utilize tools such as File-AID for data manipulation, browsing, and editing. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;6.Write and execute SQL queries using SPUFI for data retrieval and manipulation. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;7.Collaborate with cross-functional teams to analyze requirements, design solutions, and implement changes. &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;Insurance domain experience. 
&lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806574/senior-mainframe-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806574/senior-mainframe-developer-at-luxoft/</link>
  <title>[Full Time] Senior Mainframe Developer at Luxoft</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806488/hiring-senior-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;p&gt;Hi,&lt;/p&gt;&lt;br&gt;&lt;p&gt;Greetings from Sun Technology Integrators!!&lt;/p&gt;&lt;br&gt;&lt;p&gt;This is regarding a job opening with Sun Technology Integrators, Bangalore. Please find below the &lt;strong&gt;job description&lt;/strong&gt; for your reference. Kindly let me know your interest and share your updated CV to &lt;strong&gt;nandinis@suntechnologies.com &lt;/strong&gt;ASAP.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please let me know, if any of your friends are looking for a job change. Kindly share the references. &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please Note: WFO- Work From Office (We do not have Hybrid or Work From Home option) &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift Details: 5:30PM to 2:30AM-2 ways free cab facility(Pick Up+Drop)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Years of Exp-5 to 20 years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Interview Mode- 2 Virtual Technical interviews &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;PostgreSQL-Primary skill&lt;/p&gt;&lt;p&gt;MySQL-Secondary skill&lt;/p&gt;&lt;p&gt;Any Cloud Technologies like GCP/AWS/Azure&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Permanent Position&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt; Good experience as a Cloud DBA &lt;/p&gt;&lt;ul&gt;&lt;li&gt; Good experience with MySQL, Postgres Administration&lt;br&gt;  Cloud administration skills in GCP/Azure/AWS &lt;br&gt;  Experience with Cloud Migrations and Transformations&lt;br&gt;  Experience defining, developing, and implementing automated and repeatable cloud automation processes for databases (monitoring/alerting, backup, recovery, high availability, upgrades, etc.)&lt;br&gt;  Working knowledge and experience with DevOps (including infrastructure as code) tools, practice, and site reliability 
engineering principles &lt;br&gt;  Experience implementing best practices for monitoring and proactively identifying performance issues within Cloud database technologies&lt;br&gt;  Familiar with monitoring tools like percona, Grafana &amp;amp; MonYog&lt;br&gt;  Experience performing database analysis, architecture, and application development&lt;br&gt;  Considerable knowledge of different database management systems, query tools, database schemas, and distributed systems&lt;br&gt;  Strong experience with SQL and PL/SQL including solid troubleshooting &amp;amp; SQL tuning skills&lt;br&gt;  Strong analytical and problem-solving skills to address mission critical production database issues&lt;br&gt;  Excellent communication and organization skills with ability to operate in a lean, fast-paced organization&lt;br&gt;  Ability to write/modify Unix shell scripts.&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;Good to have:&lt;/p&gt;&lt;p&gt; GCP certification - GCP Administrator/Architect Certification and/or Cloud Security Certification &lt;/p&gt;&lt;br&gt;&lt;p&gt; Thanks and Regards,Nandini S | Sr.Technical Recruiter &lt;strong&gt;Sun Technology Integrators Pvt. 
Ltd.&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;nandinis@suntechnologies.com &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;www.suntechnologies.com &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sun Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806488/hiring-senior-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806488/hiring-senior-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/</link>
  <title>[Full Time] Hiring Senior Cloud SQL DBA @ Sun Technology Inc, Bangalore at Sun Technologies</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806519/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;p&gt;Hi,&lt;/p&gt;&lt;br&gt;&lt;p&gt;Greetings from Sun Technology Integrators!!&lt;/p&gt;&lt;br&gt;&lt;p&gt;This is regarding a job opening with Sun Technology Integrators, Bangalore. Please find below the &lt;strong&gt;job description&lt;/strong&gt; for your reference. Kindly let me know your interest and share your updated CV to &lt;strong&gt;nandinis@suntechnologies.com &lt;/strong&gt;ASAP.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please let me know, if any of your friends are looking for a job change. Kindly share the references. &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please Note: WFO- Work From Office (We do not have Hybrid or Work From Home option) &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift Details: 5:30PM to 2:30AM-2 ways free cab facility(Pick Up+Drop)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Years of Exp-5 to 20 years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Interview Mode- 2 Virtual Technical interviews &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;PostgreSQL-Primary skill&lt;/p&gt;&lt;p&gt;MySQL-Secondary skill&lt;/p&gt;&lt;p&gt;Any Cloud Technologies like GCP/AWS/Azure&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Permanent Position&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt; Good experience as a Cloud DBA &lt;/p&gt;&lt;ul&gt;&lt;li&gt; Good experience with MySQL, Postgres Administration&lt;br&gt;  Cloud administration skills in GCP/Azure/AWS&lt;br&gt;  Experience with Cloud Migrations and Transformations&lt;br&gt;  Experience defining, developing, and implementing automated and repeatable cloud automation processes for databases (monitoring/alerting, backup, recovery, high availability, upgrades, etc.)&lt;br&gt;  Working knowledge and experience with DevOps (including infrastructure as code) tools, practice, and site reliability engineering principles &lt;br&gt;  Experience implementing best 
practices for monitoring and proactively identifying performance issues within Cloud database technologies&lt;br&gt;  Familiar with monitoring tools like percona, Grafana &amp;amp; MonYog&lt;br&gt;  Experience performing database analysis, architecture, and application development&lt;br&gt;  Considerable knowledge of different database management systems, query tools, database schemas, and distributed systems&lt;br&gt;  Strong experience with SQL and PL/SQL including solid troubleshooting &amp;amp; SQL tuning skills&lt;br&gt;  Strong analytical and problem-solving skills to address mission critical production database issues&lt;br&gt;  Excellent communication and organization skills with ability to operate in a lean, fast-paced organization&lt;br&gt;  Ability to write/modify Unix shell scripts&lt;/li&gt;&lt;li&gt;Good to have:&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;GCP certification preferred - GCP Administrator/Architect Certification and/or Cloud Security Certification&lt;/p&gt;&lt;br&gt;&lt;p&gt; Thanks and Regards,Nandini S | Sr.Technical Recruiter &lt;strong&gt;Sun Technology Integrators Pvt. Ltd.&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;nandinis@suntechnologies.com &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;www.suntechnologies.com &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sun Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806519/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806519/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/</link>
  <title>[Full Time] Hiring Cloud SQL DBA @ Sun Technology Inc, Bangalore at Sun Technologies</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806597/senior-software-engineer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;strong&gt;Project description&lt;/strong&gt;&lt;p&gt;Information and Document Systems is a global technology change and delivery organization comprising nearly 200 individuals located mostly in Switzerland, Poland, and Singapore. Providing global capturing and document processing, archiving, and retrieval solutions to all business divisions focusing on supporting Legal, Regulatory, and Operational functions. &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;strong&gt;Responsibilities&lt;/strong&gt;&lt;/div&gt;&lt;ul&gt;&lt;li&gt;Design, implement, and manage data solutions on Azure &lt;/li&gt;&lt;li&gt;Develop and maintain data pipelines using Databricks &lt;/li&gt;&lt;li&gt;Ensure efficient data storage and retrieval using Azure Storage and Data Lake &lt;/li&gt;&lt;li&gt;Automate infrastructure and application deployments with Ansible &lt;/li&gt;&lt;li&gt;Write clean, maintainable code in Python &lt;/li&gt;&lt;li&gt;Collaborate with team members using Git and GitLab for version control and CI/CD &lt;/li&gt;&lt;li&gt;Gather and document business requirements through stakeholder interviews and workshops &lt;/li&gt;&lt;li&gt;Analyze and translate business needs into technical specifications &lt;/li&gt;&lt;li&gt;Apply data analytics to interpret data and support business decisions &lt;/li&gt;&lt;li&gt;Communicate effectively with stakeholders to manage expectations and provide project updates &lt;/li&gt;&lt;li&gt;Continuously seek opportunities to improve processes and drive efficiency &lt;/li&gt;&lt;li&gt;Share and contribute: Support and guide less senior team members, contribute to team spirit and dynamic growth, actively participate in wider engineering group and product-wide activities &lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;strong&gt;Skills&lt;/strong&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Must have&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;Strong programming skills in Python for data manipulation and automation 
[Writing application code (front-end, back-end, or full-stack), Building APIs and services, Ensuring scalability, security, and performance of applications.] &lt;/li&gt;&lt;li&gt;Designs, develops, and maintains software applications or systems. Build functional, efficient, and user-friendly software products &lt;/li&gt;&lt;li&gt;10+ years of software engineers/analysis experience &lt;/li&gt;&lt;li&gt;Degree in Computer Science, Information Technology, or related field &lt;/li&gt;&lt;li&gt;Proficient in deploying and managing services on Microsoft Azure &lt;/li&gt;&lt;li&gt;Hands-on experience with Databricks for data processing and analytics &lt;/li&gt;&lt;li&gt;Understanding of Azure Storage concepts and best practices &lt;/li&gt;&lt;li&gt;Excellent verbal and written communication skills to interact with stakeholders and team members &lt;/li&gt;&lt;li&gt;Strong analytical abilities to interpret data and provide actionable insights to support data-driven decision makingA.Crucial: &lt;/li&gt;&lt;li&gt;Experience developing in Python &lt;/li&gt;&lt;li&gt;Self-motivated and capable of acting independently &lt;/li&gt;&lt;li&gt;Solid communication skills B. Important: &lt;/li&gt;&lt;li&gt;Azure experience (but any cloud experience is valuable) &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;&lt;strong&gt;Nice to have &lt;/strong&gt;&lt;/p&gt;&lt;div&gt;&lt;div&gt;&lt;p&gt;i.Apache Spark/Databricks experience. 
&lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;ii.Big Data experience in general &lt;/p&gt;&lt;/div&gt;&lt;div&gt;&lt;p&gt;iii.Data analysis &lt;/p&gt;&lt;/div&gt;&lt;/div&gt;&lt;div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806597/senior-software-engineer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806597/senior-software-engineer-at-luxoft/</link>
  <title>[Full Time] Senior Software Engineer at Luxoft</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806609/etl-qa-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;CirrusLabs Private Limited is looking for ETL QA to join our team Roles and Responsibility &lt;span&gt; &lt;/span&gt; &lt;div&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Develop and implement comprehensive testing strategies for ETL processes to ensure data integrity and accuracy. &lt;/li&gt; &lt;li&gt; Collaborate with cross-functional teams to identify and resolve data-related issues and defects. &lt;/li&gt; &lt;li&gt; Design and execute automated tests using various tools and technologies to improve testing efficiency. &lt;/li&gt; &lt;li&gt; Analyze test results, identify trends, and recommend process improvements. &lt;/li&gt; &lt;li&gt; Develop and maintain detailed documentation of testing procedures and results. &lt;/li&gt; &lt;li&gt; Participate in agile development methodologies, providing feedback and guidance on testing requirements. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; Job Requirements &lt;span&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Strong understanding of ETL concepts, data warehousing principles, and database design. &lt;/li&gt; &lt;li&gt; Experience with testing frameworks, automation tools, and version control systems. &lt;/li&gt; &lt;li&gt; Excellent analytical, problem-solving, and communication skills. &lt;/li&gt; &lt;li&gt; Ability to work effectively in a team environment and collaborate with stakeholders. &lt;/li&gt; &lt;li&gt; Strong attention to detail and ability to prioritize tasks and meet deadlines. &lt;/li&gt; &lt;li&gt; Familiarity with industry standards and best practices in software development life cycles. &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;Disclaimer: This job description has been sourced from a public domain and may have been modified by Naukri.com to improve clarity for our users. 
We encourage job seekers to verify all details directly with the employer via their official channels before applying.&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;ETL Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806609/etl-qa-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806609/etl-qa-at-cirruslabs/</link>
  <title>[Full Time] ETL QA at Cirruslabs</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806632/ms-azure-with-adf-databricks-expert-at-cirruslabs/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;- PowerBI and AAS expert (Strong SC or Specialist Senior) &lt;br&gt; - Should have hands-on experience of Data Modelling in Azure SQL Data Warehouse and Azure Analysis Service &lt;br&gt; - Should be able to write and test DAX queries &lt;br&gt; - Should be able to generate Paginated Reports in PowerBI &lt;br&gt; - Should have minimum 3 Years working experience in delivering projects in PowerBI &lt;br&gt; ROLE 2: &lt;br&gt; - DataBricks expert (Strong SC or Specialist Senior) &lt;br&gt; - Should have minimum 3 years working experience of writing code in Spark and Scala &lt;br&gt; ROLE 3: &lt;br&gt; - One Azure backend expert (Strong SC or Specialist Senior) &lt;br&gt; - Should have hands-on experience of working with ADLS, ADF and Azure SQL DW &lt;br&gt; - Should have minimum 3 Years working experience of delivering Azure projects&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Software Product&lt;br/&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;br/&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;br/&gt;&lt;b&gt;Role: &lt;/b&gt;Database Manager&lt;br/&gt;&lt;b&gt;Employment Type: &lt;/b&gt;Full time&lt;br/&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Cirruslabs&lt;br/&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;br/&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806632/ms-azure-with-adf-databricks-expert-at-cirruslabs/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806632/ms-azure-with-adf-databricks-expert-at-cirruslabs/</link>
  <title>[Full Time] MS Azure with ADF/Databricks Expert at Cirruslabs</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/807055/senior-data-engineer-python-developer-at-luxoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;b&gt; Project description &lt;/b&gt; &lt;li&gt; Luxoft has been asked to contract a Developer in support of a number of customer initiatives. The primary objective is to develop based on client requirements in the Telecom/network work environment &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Responsibilities &lt;/b&gt; &lt;div&gt; &lt;div&gt; &lt;li&gt; A Data Engineer with experience in the following techologies: &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Databricks and Azure &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Apache Spark-based, hands on Python, SQL, Apache Airflow. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Databricks clusters for ETL processes. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Integration with ADLS, Blob Storage. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Efficiently ingest data from various sources, including on-premises databases, cloud storage, APIs, and streaming data. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Use Azure Key Vault for managing secrets. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Hands on experience working with API&apos;&apos;s &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Kafka/Azure EventHub streaming hands on experience &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Hands on experience with data bricks delta API&apos;&apos;s and UC catalog Hands on experience working with version control tools Github &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Data Analytics &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Supports various ML frameworks. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Integration with Databricks for model training. 
&lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; OnPrem &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Exposure on Linux based systems &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Unix scripting &lt;/li&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; Skills &lt;/b&gt; &lt;li&gt; Must have &lt;/li&gt; &lt;div&gt; &lt;div&gt; &lt;li&gt; Python, Apache Airflow, Microsoft Azure and Databricks, SQL, databricks clusters for ETL, ADLS, Blob storage, ingestion from various sources including databases and cloud storage, APIs and streaming data, Kafka/Azure EventHub, databricks delta APIs and UC catalog. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Education: Typically, a Bachelor&apos;&apos;s degree in Computer Science (preferably M.Sc. in Computer Science), Software Engineering, or a related field is required. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Experience: 7+ years of experience in development or related fields. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Problem-Solving Skills: Ability to troubleshoot and resolve issues related to application development and deployment. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Communication Skills: Ability to effectively communicate technical concepts to team members and stakeholders. This includes written and verbal communication. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Teamwork: Ability to work effectively in teams with diverse individuals and skill sets. &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Continuous Learning: Given the rapidly evolving nature of web technologies, a commitment to learning and adapting to new technologies and methodologies is crucial. 
&lt;/li&gt; &lt;/div&gt; &lt;/div&gt; &lt;li&gt; Nice to have &lt;/li&gt; &lt;div&gt; &lt;div&gt; &lt;li&gt; Snowflake, PostGre, Redis exposure &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; GenAI exposure &lt;/li&gt; &lt;/div&gt; &lt;div&gt; &lt;li&gt; Good understanding of RBAC &lt;/li&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt; &lt;div&gt; &lt;b&gt; &lt;/b&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Legal&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Luxoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/807055/senior-data-engineer-python-developer-at-luxoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/807055/senior-data-engineer-python-developer-at-luxoft/</link>
  <title>[Full Time] Senior Data Engineer/Python Developer at Luxoft</title>
  <dc:date>Tue, 13 Jan 2026 12:00:00 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/804708/gcc-network-design-sr-modeler-at-pepsico/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;strong&gt; Overview &lt;/strong&gt; &lt;div&gt; &lt;span&gt; This role supports the development of the next generation network design capability, enabling industry leading design capability for all Global business units, Sectors, and ND Global Capability Centers, across the End-to-End Value Chain. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; This role is responsible for developing and refining network optimization modeling projects and analyses that help source, deploy, store, and deliver our portfolio of products to customers at the lowest cost while supporting long-term growth. The Global Network Design Modeler is responsible for driving the testing of advanced modeling tools and designing new approaches and systems to optimize our supply chain network. This role requires deep understanding of optimization algorithms, data science techniques and supply chain operations. Ultimately, this role will be responsible for building and analysing optimization models using state-of-the-art optimization software to create scalable and effective solutions. &lt;/span&gt; &lt;/div&gt; &lt;div&gt; &lt;span&gt; This will enable identification of transformative projects that will deliver significant levels of profitability, efficiency, sustainability, and service, that align with the organizations global operational objectives. &lt;/span&gt; &lt;/div&gt; &lt;strong&gt; Responsibilities &lt;/strong&gt; &lt;ul&gt; &lt;li&gt; Support the validation and implementation of advanced supply chain models and tools to optimize the E2E supply chain. &lt;/li&gt; &lt;li&gt; Identify, recommend and develop process improvement and innovative approaches for network model building and to simplify/streamline the process. &lt;/li&gt; &lt;li&gt; Develop and maintain robust network optimization models to evaluate project hypothesis and strategic alternatives and quantify model changes in end-to-end supply chain. 
&lt;/li&gt; &lt;li&gt; Translate complex business problems into optimization models using tools like Supply Chain Guru &lt;/li&gt; &lt;li&gt; Utilize organizational systems and tools for data collection and to build project facts, benchmarking analyses, etc. &lt;/li&gt; &lt;li&gt; Ability to evaluate optimization models inputs and outputs by data automation, executing models, analyzing data and reporting &lt;/li&gt; &lt;li&gt; Partner with cross functional teams to provide necessary inputs for long term projects and ensure that project recommendations are organizationally aligned &lt;/li&gt; &lt;li&gt; Ability to manage multiple tasks and priorities concurrently and work well under tight timelines &lt;/li&gt; &lt;li&gt; Develops domain knowledge and Subject Matter Expertise (SME) in Network Modeling and analytics &lt;/li&gt; &lt;li&gt; Document model methodologies and provide training and support to analysts &lt;/li&gt; &lt;/ul&gt; &lt;strong&gt; Qualifications &lt;/strong&gt; &lt;ul&gt; &lt;li&gt; Bachelors Degree in Operations Research, Industrial Engineering/Supply Chain or similar. &lt;/li&gt; &lt;li&gt; 3+ years of experience in a supply chain planning/optimization function utilizing Modelling &amp;amp; Technical Design tools, such as Llamasoft, Coupa SCG, JDA or similar. 
&lt;/li&gt; &lt;li&gt; Ability to translate business problems into analytical frameworks &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;span&gt; &lt;strong&gt; Skills &lt;/strong&gt; &lt;/span&gt; &lt;/div&gt; &lt;ul&gt; &lt;li&gt; Leadership &lt;/li&gt; &lt;li&gt; Strong Communicator &lt;/li&gt; &lt;li&gt; Results Oriented &lt;/li&gt; &lt;li&gt; Agility to solve problems &lt;/li&gt; &lt;li&gt; Analytical Thinking &lt;/li&gt; &lt;li&gt; Teamwork &lt;/li&gt; &lt;li&gt; Excellent interpersonal relationships &lt;/li&gt; &lt;li&gt; Proficiency in Microsoft office suite, Tableau and Power BI &lt;/li&gt; &lt;li&gt; Manage of big data bases on SQL or similar (nice to have) &lt;/li&gt; &lt;li&gt; Use of Supply Chain Guru and Data Guru (must have) &lt;/li&gt; &lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;Beverage&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Architect / Designer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Pepsico&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Hyderabad&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/804708/gcc-network-design-sr-modeler-at-pepsico/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/804708/gcc-network-design-sr-modeler-at-pepsico/</link>
  <title>[Full Time] GCC Network Design Sr Modeler at Pepsico</title>
  <dc:date>Mon, 12 Jan 2026 20:08:18 +0530</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/804505/erp-cloud-techno-fucntional-professional-at-ncr-corporation/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;ul&gt; &lt;li&gt; Bachelor s / master s degree or equivalent in Computer Science or Information Technology. &lt;/li&gt; &lt;li&gt; &lt;div&gt; 9+ Years of Experience in IT industry working on Oracle Fusion cloud/ Oracle EBS r12. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Hands-on experience on Oracle Applications SCM Modules (Inventory/ Warehouse Management, Logistics/ Order Management / Procurement) &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Proficient in oracle database skills - SQL. PL/SQL packages, Cursors, procedures. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Proficient on creating BIP Reports, FBDI imports. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Preferable Knowledge of Oracle Oracle Visual Builder Cloud Service. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Preferable knowledge of using Oracle fusion API s using VBCS Add-in. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Preferable knowledge of Oracle Cloud Integration. &lt;/div&gt; &lt;/li&gt; &lt;li&gt; &lt;div&gt; Design and develop integrations using Oracle Integration Cloud platform to enable seamless data flow between applications and systems. Configure and maintain OIC connections with external systems, APIs, and services. Develop integration interfaces, mappings, and transformations to ensure data consistency and accuracy. 
&lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;/li&gt; &lt;/ul&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;br&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt; &lt;div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Developer / Engineer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;NCR Corporation&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/804505/erp-cloud-techno-fucntional-professional-at-ncr-corporation/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/804505/erp-cloud-techno-fucntional-professional-at-ncr-corporation/</link>
  <title>[Full Time] ERP Cloud Techno-functional Professional at NCR Corporation</title>
  <dc:date>2026-01-12T18:27:16+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/804430/etl-tester-senior-engineer-at-iris-software/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Develop focused test plans&lt;/li&gt;&lt;/ul&gt;&lt;div&gt;&lt;div&gt;&lt;ul&gt;&lt;li&gt;Perform complex test cases and maintain associated test records&lt;/li&gt;&lt;li&gt;Participates in agile estimation taking into consideration product and process requirements during the development lifecycle&lt;/li&gt;&lt;li&gt;Assists in the process with development in order to provide timely and accurate resolution to critical customer issues&lt;/li&gt;&lt;li&gt;Engineering expert that can reverse engineer the most difficult defects, UX issues, source of dirty data and configuration errors. Ability to reconstruct these complex issues into a clear proof for development to fix.&lt;/li&gt;&lt;li&gt;Understands the quality portion of building, packaging and deploying code. Identify patterns that need to be addressed from a quality perspective prior to hitting QA environments.&lt;/li&gt;&lt;li&gt;Proactively works with other team members as needed to resolve issues and meet project goals&lt;/li&gt;&lt;li&gt;Set up and maintain testing environments&lt;/li&gt;&lt;li&gt;Follows defined engineering process during the development lifecycle&lt;/li&gt;&lt;li&gt;Performs peer test reviews to identify improvements to test plans&lt;/li&gt;&lt;li&gt;Provides technical leadership and influence to other team members&lt;/li&gt;&lt;li&gt;Coordinates his/her own work as well as the work required from others&lt;/li&gt;&lt;li&gt;Demonstrates mastery of problem-solving skills-test plans seldom require re-work&lt;/li&gt;&lt;li&gt;Uses customer understanding and knowledge of the relevant technologies to measure the quality of a better product/service&lt;/li&gt;&lt;/ul&gt;&lt;p&gt;QUALIFICATIONS/ EXP&lt;/p&gt;&lt;ul&gt;&lt;li&gt;6 to 8 years experience as a test engineer specially in ETL Testing.&lt;/li&gt;&lt;li&gt;Strong in Sql&lt;/li&gt;&lt;li&gt;Corporate Banking knowledge/Experience would be added 
advantage.&lt;/li&gt;&lt;li&gt;Proven experience in test plan design&lt;/li&gt;&lt;li&gt;Understanding of the software development lifecycle and the deliverable created during the development lifecycle&lt;/li&gt;&lt;li&gt;Strong analytical skills, creative and critical thinking ability and problem-solving skills&lt;/li&gt;&lt;li&gt;Familiarity with relevant quality assurance industry standard best practices and methodologies&lt;/li&gt;&lt;li&gt;Dedication to customer satisfaction&lt;/li&gt;&lt;li&gt;Excellent communication skills&lt;/li&gt;&lt;li&gt;Problem solving skills&lt;/li&gt;&lt;li&gt;Excellent time management skill&lt;/li&gt;&lt;/ul&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;&lt;b&gt;&lt;b&gt;Mandatory Competencies&lt;/b&gt;&lt;/b&gt;&lt;br&gt;&lt;/div&gt;&lt;div&gt;&lt;div&gt;ETL - ETL - Tester&lt;/div&gt;&lt;div&gt;Beh - Communication&lt;/div&gt;&lt;div&gt;Database - Database Programming - SQL&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Data warehouse Developer&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Iris Software&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/804430/etl-tester-senior-engineer-at-iris-software/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/804430/etl-tester-senior-engineer-at-iris-software/</link>
  <title>[Full Time] ETL Tester - Senior Engineer at Iris Software</title>
  <dc:date>2026-01-12T13:43:50+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/805692/technical-lead-oracle-cx-consultant-at-birlasoft/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;div&gt; &lt;div&gt; &lt;div&gt; Area(s) of responsibility &lt;/div&gt; &lt;div&gt; &lt;p&gt;Primary Skill: Oracle RightNow Cloud Service&lt;/p&gt;&lt;br&gt;&lt;p&gt;Required Additional skill: Oracle Sales Cloud Sales Force Automation&lt;/p&gt;&lt;br&gt;&lt;p&gt;Job Description: Oracle RightNow Service Cloud Consultant&lt;/p&gt;&lt;br&gt;&lt;p&gt;Role Overview:&lt;/p&gt;&lt;br&gt;&lt;p&gt;The Oracle CX Consultant will be responsible for implementing, customizing, and supporting Oracle Customer Experience (CX) solutions, including Service Cloud, Sales Cloud and CDM. The ideal candidate will possess a blend of technical, functional, and soft skills to deliver high-quality solutions and drive business value for clients.&lt;/p&gt;&lt;p&gt;Key Responsibilities&lt;/p&gt;&lt;br&gt;&lt;p&gt;Technical Skills:&lt;/p&gt;&lt;br&gt;&lt;p&gt; Develop and customize Oracle RightNow Service Cloud components, including (Mandatory to have)&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Reports and Dashboards/Scheduling&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Widget Customizations, Customer Portal configuration&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Custom Scripts and Addins&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Javascript API, Custom Process (CPM), Custom scripts&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Integrations and Workspace Design&lt;/p&gt;&lt;br&gt;&lt;p&gt;o REST API, PHP, ROQL&lt;/p&gt;&lt;br&gt;&lt;p&gt; Configure and enhance Oracle Sales Cloud features (Good to have)&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Sandbox, Pages, Work areas, Workspaces&lt;/p&gt;&lt;br&gt;&lt;p&gt;o OTBI and BIP reports&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Groovy Scripts, Oracle SQL, Application Composer&lt;/p&gt;&lt;br&gt;&lt;p&gt;o Sales plugin in Outlook, Import/Export, Integrations (REST/SOAP API), Workflow&lt;/p&gt;&lt;br&gt;&lt;p&gt;Qualifications&lt;/p&gt;&lt;br&gt;&lt;p&gt; Bachelor s degree in Computer Science, Information Technology, or related field&lt;/p&gt;&lt;br&gt;&lt;p&gt; 5+ years of experience in 
Oracle CX implementation and support&lt;/p&gt;&lt;br&gt;&lt;p&gt; Hands-on expertise in Oracle RightNow Service Cloud, Sales Cloud and CDM modules&lt;/p&gt;&lt;br&gt;&lt;p&gt; Proficiency in scripting (Groovy, JavaScript, PHP), API integrations, and report development&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience in functional configuration and business process mapping&lt;/p&gt;&lt;br&gt;&lt;p&gt; Excellent communication and interpersonal skills&lt;/p&gt;&lt;br&gt;&lt;p&gt; Good to have experience in Oracle Eloqua&lt;/p&gt;&lt;br&gt;&lt;p&gt;Preferred Attributes&lt;/p&gt;&lt;br&gt;&lt;p&gt; Oracle CX certifications&lt;/p&gt;&lt;br&gt;&lt;p&gt; Experience working with global teams and clients&lt;/p&gt;&lt;br&gt;&lt;p&gt; Ability to work independently and as part of a team&lt;/p&gt;&lt;br&gt;&lt;p&gt; Strong analytical and documentation skills&lt;/p&gt;&lt;br&gt; &lt;/div&gt; &lt;/div&gt; &lt;/div&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Birlasoft&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Pune&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/805692/technical-lead-oracle-cx-consultant-at-birlasoft/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/805692/technical-lead-oracle-cx-consultant-at-birlasoft/</link>
  <title>[Full Time] Technical Lead - Oracle CX Consultant at Birlasoft</title>
  <dc:date>2026-01-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806392/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;p&gt;Hi,&lt;/p&gt;&lt;br&gt;&lt;p&gt;Greetings from Sun Technology Integrators!!&lt;/p&gt;&lt;br&gt;&lt;p&gt;This is regarding a job opening with Sun Technology Integrators, Bangalore. Please find below the &lt;strong&gt;job description&lt;/strong&gt; for your reference. Kindly let me know your interest and share your updated CV to &lt;strong&gt;nandinis@suntechnologies.com &lt;/strong&gt;ASAP.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please let me know, if any of your friends are looking for a job change. Kindly share the references. &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please Note: WFO- Work From Office (We do not have Hybrid or Work From Home option) &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift Details: 5:30PM to 2:30AM-2 ways free cab facility(Pick Up+Drop)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Years of Exp-5 to 20 years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Interview Mode- 2 Virtual Technical interviews &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;MySQL+PostgreSQL+GCP&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Permanent Position&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt; Good experience as a Cloud DBA &lt;/p&gt;&lt;ul&gt;&lt;li&gt; Good experience with MySQL, Postgres Administration&lt;br&gt;  Cloud administration skills in GCP and Azure&lt;br&gt;  GCP certification preferred - GCP Administrator/Architect Certification and/or Cloud Security Certification&lt;br&gt;  Experience with Cloud Migrations and Transformations&lt;br&gt;  Experience defining, developing, and implementing automated and repeatable cloud automation processes for databases (monitoring/alerting, backup, recovery, high availability, upgrades, etc.)&lt;br&gt;  Working knowledge and experience with DevOps (including infrastructure as code) tools, practice, and site reliability engineering principles &lt;br&gt;  Experience 
implementing best practices for monitoring and proactively identifying performance issues within Cloud database technologies&lt;br&gt;  Familiar with monitoring tools like percona, Grafana &amp;amp; MonYog&lt;br&gt;  Experience performing database analysis, architecture, and application development&lt;br&gt;  Considerable knowledge of different database management systems, query tools, database schemas, and distributed systems&lt;br&gt;  Strong experience with SQL and PL/SQL including solid troubleshooting &amp;amp; SQL tuning skills&lt;br&gt;  Strong analytical and problem-solving skills to address mission critical production database issues&lt;br&gt;  Excellent communication and organization skills with ability to operate in a lean, fast-paced organization&lt;br&gt;  Ability to write/modify Unix shell scripts&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt; Thanks and Regards,Nandini S | Sr.Technical Recruiter &lt;strong&gt;Sun Technology Integrators Pvt. Ltd.&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;nandinis@suntechnologies.com &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;www.suntechnologies.com &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sun Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806392/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806392/hiring-cloud-sql-dba-sun-technology-inc-bangalore-at-sun-technologies/</link>
  <title>[Full Time] Hiring Cloud SQL DBA @ Sun Technology Inc, Bangalore at Sun Technologies</title>
  <dc:date>2026-01-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806397/hiring-oracle-dba-sun-technology-inc-bangalore-at-sun-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;Hi,&lt;/p&gt;&lt;br&gt;&lt;p&gt;Greetings from Sun Technology Integrators!!&lt;/p&gt;&lt;br&gt;&lt;p&gt;This is regarding a job opening with Sun Technology Integrators, Bangalore. Please find below the &lt;strong&gt;job description&lt;/strong&gt; for your reference. Kindly let me know your interest and share your updated CV to &lt;strong&gt;nandinis@suntechnologies.com &lt;/strong&gt;ASAP.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please let me know, if any of your friends are looking for a job change. Kindly share the references. &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please Note: WFO- Work From Office (We do not have Hybrid or Work From Home option) &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift Details: 5:30PM to 2:30AM-2 ways free cab facility(Pick Up+Drop)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Years of Exp-5 to 20 years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Interview Mode- 2 Virtual Technical interviews &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Data Guard,Golden gate, High Availability , RAC , OEM , OCI ,Private/Public Ips , Clustering , Subnet, Node etc&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Preferred Skills(Optional) : Exa Data,Cloud, Oracle AI Platform&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Permanent Position&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;Overall 5- 7+ years of experience in managing databases and high-volume enterprise systems.&lt;/p&gt;&lt;p&gt;Extensive experience in managing Oracle databases on Linux and Solaris platforms.&lt;/p&gt;&lt;p&gt;Extensive experience in managing Oracle databases on Exadata and Supercluster Engineered Systems.&lt;/p&gt;&lt;p&gt;Well versed with Relational and Oracle database concepts, High Availability.&lt;/p&gt;&lt;p&gt;Must have experience with working in mid to large size production environment across multiple 
Datacenters.&lt;/p&gt;&lt;p&gt;Experience in working and coordinating with Business Teams, Application Teams, Oracle Support etc.&lt;/p&gt;&lt;p&gt;Experience in working with development and other support teams to participate in release and change management processes to implement new features/functions as well apply mandatory patches or upgrades.&lt;/p&gt;&lt;p&gt;Ability to effectively share technical information, communicate technical issues and solutions to all levels of business.&lt;/p&gt;&lt;p&gt;Experience with taking a Backup and restore DB data to secure the system in the event of failure.&lt;/p&gt;&lt;p&gt;Knowledge on public clouds and services offerings like GCP, Azure etc.&lt;/p&gt;&lt;p&gt;Implement proactive monitoring of all databases and provide recommendations.&lt;/p&gt;&lt;p&gt;Knowledge of Oracle best practices &amp;amp; Oracle data modeling.&lt;/p&gt;&lt;p&gt;Experience with Monitoring Performance using Oracle OEM or other similar monitoring tools.&lt;/p&gt;&lt;br&gt;&lt;p&gt; Thanks and Regards,Nandini S | Sr.Technical Recruiter &lt;strong&gt;Sun Technology Integrators Pvt. Ltd.&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;nandinis@suntechnologies.com &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;www.suntechnologies.com &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;DBA / Data warehousing - Other&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sun Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806397/hiring-oracle-dba-sun-technology-inc-bangalore-at-sun-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806397/hiring-oracle-dba-sun-technology-inc-bangalore-at-sun-technologies/</link>
  <title>[Full Time] Hiring Oracle DBA @ Sun Technology Inc, Bangalore at Sun Technologies</title>
  <dc:date>2026-01-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/806531/hiring-oracle-administrator-sun-technology-inc-bangalore-at-sun-technologies/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;br&gt;&lt;p&gt;Hi,&lt;/p&gt;&lt;br&gt;&lt;p&gt;Greetings from Sun Technology Integrators!!&lt;/p&gt;&lt;br&gt;&lt;p&gt;This is regarding a job opening with Sun Technology Integrators, Bangalore. Please find below the &lt;strong&gt;job description&lt;/strong&gt; for your reference. Kindly let me know your interest and share your updated CV to &lt;strong&gt;nandinis@suntechnologies.com &lt;/strong&gt;ASAP.&lt;/p&gt;&lt;br&gt;&lt;p&gt;Kindly share the below details.&lt;/p&gt;&lt;br&gt;&lt;p&gt;C.CTC-&lt;/p&gt;&lt;p&gt;E.CTC-&lt;/p&gt;&lt;p&gt;Notice Period-&lt;/p&gt;&lt;p&gt;Current location-&lt;/p&gt;&lt;p&gt;Are you serving Notice period/immediate-?&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please let me know, if any of your friends are looking for a job change. Kindly share the references. &lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Please Note: WFO- Work From Office (We do not have Hybrid or Work From Home option) &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Shift Details: 5:30PM to 2:30AM-2 ways free cab facility(Pick Up+Drop)&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Years of Exp-5 to 20 years&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Interview Mode- 2 Virtual Technical interviews &lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Mandatory Skills :&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;&lt;strong&gt;Data Guard,Golden gate, High Availability , RAC , OEM , OCI ,Private/Public Ips , Clustering , Subnet, Node etc&lt;/strong&gt;&lt;/p&gt;&lt;p&gt;&lt;strong&gt;Preferred Skills(Optional) : Exa Data,Cloud, Oracle AI Platform&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;Permanent Position&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;p&gt;Overall 5- 7+ years of experience in managing databases and high-volume enterprise systems.&lt;/p&gt;&lt;p&gt;Extensive experience in managing Oracle databases on Linux and Solaris platforms.&lt;/p&gt;&lt;p&gt;Extensive experience in managing Oracle 
databases on Exadata and Supercluster Engineered Systems.&lt;/p&gt;&lt;p&gt;Well versed with Relational and Oracle database concepts, High Availability.&lt;/p&gt;&lt;p&gt;Must have experience with working in mid to large size production environment across multiple Datacenters.&lt;/p&gt;&lt;p&gt;Experience in working and coordinating with Business Teams, Application Teams, Oracle Support etc.&lt;/p&gt;&lt;p&gt;Experience in working with development and other support teams to participate in release and change management processes to implement new features/functions as well apply mandatory patches or upgrades.&lt;/p&gt;&lt;p&gt;Ability to effectively share technical information, communicate technical issues and solutions to all levels of business.&lt;/p&gt;&lt;p&gt;Experience with taking a Backup and restore DB data to secure the system in the event of failure.&lt;/p&gt;&lt;p&gt;Knowledge on public clouds and services offerings like GCP, Azure etc.&lt;/p&gt;&lt;p&gt;Implement proactive monitoring of all databases and provide recommendations.&lt;/p&gt;&lt;p&gt;Knowledge of Oracle best practices &amp;amp; Oracle data modeling.&lt;/p&gt;&lt;p&gt;Experience with Monitoring Performance using Oracle OEM or other similar monitoring tools.&lt;/p&gt;&lt;p&gt;OCI,Oracle AI Platform&lt;/p&gt;&lt;br&gt;&lt;p&gt; Thanks and Regards,Nandini S | Sr.Technical Recruiter &lt;strong&gt;Sun Technology Integrators Pvt. 
Ltd.&lt;/strong&gt;&lt;/p&gt;&lt;ul&gt;&lt;li&gt;nandinis@suntechnologies.com &lt;/li&gt;&lt;/ul&gt;&lt;p&gt;www.suntechnologies.com &lt;/p&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Sun Technologies&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Bengaluru&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/806531/hiring-oracle-administrator-sun-technology-inc-bangalore-at-sun-technologies/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/806531/hiring-oracle-administrator-sun-technology-inc-bangalore-at-sun-technologies/</link>
  <title>[Full Time] Hiring Oracle Administrator @ Sun Technology Inc, Bangalore at Sun Technologies</title>
  <dc:date>2026-01-12T12:00:00+05:30</dc:date>
 </item>
 <item rdf:about="https://ineojobs.com/job/804874/postgresql-developer-at-centilytics/">
  <description>&lt;h4&gt;Job Description&lt;/h4&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Job Overview:&lt;/u&gt;&lt;/strong&gt; &lt;/p&gt;&lt;p&gt;We are seeking a talented PostgreSQL Developer with 2-4 years of experience to join our data engineering or application development team. The ideal candidate will demonstrate strong expertise in database development on PostgreSQL, including schema design, writing and optimizing complex queries, and supporting data-driven applications. The role also encompasses providing 24 by 7 support as per business need, including planned and scheduled activities during weekends or night hours.&lt;/p&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Key Responsibilities:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Design, develop, and maintain relational database structures, including tables, views, indexes, and constraints in PostgreSQL.&lt;/li&gt;&lt;li&gt;Write efficient SQL and PL/pgSQL functions, stored procedures, and triggers to support application logic and business requirements.&lt;/li&gt;&lt;li&gt;Optimize and refactor queries for performance, ensuring minimal latency and efficient resource consumption.&lt;/li&gt;&lt;li&gt;Collaborate with backend and application developers to translate business requirements into scalable database solutions.&lt;/li&gt;&lt;li&gt;Assist with data migration, ETL, and integration tasks between various sources and PostgreSQL.&lt;/li&gt;&lt;li&gt;Enforce data integrity, security, and compliance best practices throughout the database lifecycle.&lt;/li&gt;&lt;li&gt;Monitor and troubleshoot database performance, identify bottlenecks, and propose solutions.&lt;/li&gt;&lt;li&gt;Participate in database version control and release management following established SDLC practices.&lt;/li&gt;&lt;li&gt;Contribute to documentation of database structures, processes, and procedures.&lt;/li&gt;&lt;li&gt;Support production issues, analyze and resolve database-related bugs, and participate in on-call 
rotation as required.&lt;/li&gt;&lt;li&gt;Provide 24 by 7 support as part of an on-call rotation, ensuring critical production issues are addressed promptly at any time.&lt;/li&gt;&lt;li&gt;Perform planned and scheduled maintenance activities (e.g., patching, upgrades, data migrations) during night hours or weekends to minimize business disruption and maintain service availability.&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Required Skills &amp;amp; Qualifications:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Education: Bachelors degree in Computer Science, Information Technology, or a related field, or equivalent hands-on experience.&lt;/li&gt;&lt;li&gt;Experience: 2-3 years of practical experience as a PostgreSQL/PGSQL developer.&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Technical Skills:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Strong knowledge of SQL and PL/pgSQL programming.&lt;/li&gt;&lt;li&gt;Experience with schema design, normalization, and data modeling.&lt;/li&gt;&lt;li&gt;Proficiency in building and optimizing complex queries and stored procedures.&lt;/li&gt;&lt;li&gt;Familiarity with database indexing, partitioning, and other optimization techniques.&lt;/li&gt;&lt;li&gt;Understanding of database security, access controls, and role management.&lt;/li&gt;&lt;li&gt;Exposure to ETL processes, data migration, and integration.&lt;/li&gt;&lt;li&gt;Experience with Linux/Unix environments for general database operations.&lt;/li&gt;&lt;li&gt;Familiarity with source code/version control systems (e.g., Git).&lt;/li&gt;&lt;li&gt;Exposure to cloud-based PostgreSQL deployments (e.g., AWS RDS, Azure Database for PostgreSQL) is a plus.&lt;/li&gt;&lt;li&gt;Knowledge of reporting tools or extensions (e.g., PostGIS, TimescaleDB) is an advantage.&lt;/li&gt;&lt;/ul&gt;&lt;br&gt;&lt;p&gt;&lt;strong&gt;&lt;u&gt;Desired 
Attributes:&lt;/u&gt;&lt;/strong&gt;&lt;/p&gt;&lt;br&gt;&lt;ul&gt;&lt;li&gt;Strong analytical and troubleshooting skills for solving database and data issues.&lt;/li&gt;&lt;li&gt;Effective communication and collaboration skills for cross-functional teamwork.&lt;/li&gt;&lt;li&gt;Ability to manage multiple tasks and deadlines in a fast-paced environment.&lt;/li&gt;&lt;li&gt;Eagerness to learn new technologies and stay updated with PostgreSQL advancements.&lt;/li&gt;&lt;li&gt;Commitment to maintaining standards and best practices in database development.&lt;/li&gt;&lt;li&gt;Flexibility to work outside standard business hours for planned maintenance or urgent support requirements.&lt;/li&gt;&lt;/ul&gt;&lt;h4&gt;Job Classification&lt;/h4&gt;&lt;b&gt;Industry: &lt;/b&gt;IT Services &amp;amp; Consulting&lt;/br&gt;&lt;b&gt;Functional Area / Department: &lt;/b&gt;Engineering - Software &amp;amp; QA&lt;/br&gt;&lt;b&gt;Role Category: &lt;/b&gt;DBA / Data warehousing&lt;/br&gt;&lt;b&gt;Role: &lt;/b&gt;Database Administrator&lt;/br&gt;&lt;b&gt;Employement Type: &lt;/b&gt;Full time&lt;/br&gt;&lt;h4&gt;Contact Details:&lt;/h4&gt;&lt;b&gt;Company: &lt;/b&gt;Centilytics&lt;/br&gt;&lt;b&gt;Location(s): &lt;/b&gt;Noida, Gurugram&lt;/br&gt;&lt;b&gt;&lt;br /&gt;&lt;br /&gt;&lt;a href=&quot;https://ineojobs.com/job/804874/postgresql-developer-at-centilytics/&quot;&gt;Apply&lt;/a&gt;&lt;br /&gt;</description>
  <link>https://ineojobs.com/job/804874/postgresql-developer-at-centilytics/</link>
  <title>[Full Time] PostgreSQL Developer at Centilytics</title>
  <dc:date>2026-01-12T07:54:22+05:30</dc:date>
 </item>
</rdf:RDF>