2024/05/04

Dynamoose & Express.js: CRUD Operations

Today we explore Dynamoose, an ODM for DynamoDB. I am using a local DynamoDB instance instead of a cloud instance.

As usual, I am creating my base Express template using the generator.

express --view=ejs dynamoose-express

Now I will create demo.js in the routes folder & mount it in app.js at mount point '/demo'.

Content of demo.js

var express = require("express");
var router = express.Router();
var User = require("../models/userModel");

/* Create a new user. */
router.post("/createUser", async function (req, res, next) {
  try {
    // Build the model instance from the request body
    const user = new User({
      name: req.body.name,
      email: req.body.email,
      age: req.body.age,
    });

    // Save the user to DynamoDB; overwrite: false prevents replacing an existing item with the same key
    let savedUser = await user.save({ overwrite: false });
    // Dynamoose returns dates as epoch milliseconds, so convert back to a Date for the response
    savedUser.createdAt = new Date(savedUser.createdAt);

    res.json({
      success: true,
      message: "User created successfully",
      data: savedUser,
    });
  } catch (exp) {
    res.json({ success: false, message: exp.message });
  }
});

//get record by email
router.get("/getByEmail/:email", async function (req, res, next) {
try {
let user = await User.query("email").eq(req.params.email).exec();
res.json({ success: true, message: "User found", data: user });
} catch (exp) {
res.json({ success: false, message: exp.message });
}
});
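
Since email is defined as a global secondary index (EmailIndex) in the model shown later, Dynamoose works out on its own that this query can use that index. If you ever want to pin the query to a specific index, the query API lets you name it explicitly; a minimal sketch, assuming the EmailIndex from userModel.js:

let user = await User.query("email")
  .eq(req.params.email)
  .using("EmailIndex") // force this particular index instead of letting Dynamoose pick
  .exec();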

//get record by user id
router.get("/getByUserId/:userId", async function (req, res, next) {
try {
let user = await User.query("userId").eq(req.params.userId).exec();
res.json({ success: true, message: "User found", data: user });
} catch (exp) {
res.json({ success: false, message: exp.message });
}
});

//get all records
router.get("/getAll", async function (req, res, next) {
try {
const users = await User.scan().exec();
res.json({ success: true, message: "User found", data: users });
} catch (exp) {
res.json({ success: false, message: exp.message });
}
});

//greater than: despite the route name, gt() matches users whose age is greater than the given value
router.get("/youngerThan/:age", async function (req, res, next) {
  try {
    const users = await User.scan()
      .filter("age")
      .gt(parseInt(req.params.age))
      .exec();
    res.json({ success: true, message: "User found successfully", data: users });
  } catch (exp) {
    res.json({ success: false, message: exp.message });
  }
});

//delete
router.delete("/:userId", async (req, res, next) => {
  try {
    let myUser = await User.query("userId").eq(req.params.userId).exec();
    if (myUser.length) {
      await myUser[0].delete(); // delete the user with the specified userId
      res.json({ success: true, message: "User deleted successfully" });
    } else {
      res.json({ success: false, message: "User not found" });
    }
  } catch (exp) {
    res.json({ success: false, message: exp.message });
  }
});

//update
router.put("/:userId", async (req, res, next) => {
  try {
    let myUser = new User({
      name: req.body.name,
      email: req.body.email,
      age: req.body.age,
      userId: req.params.userId,
      updatedAt: Date.now(),
    });

    // overwrite: true replaces the entire item for this userId
    myUser = await myUser.save({ overwrite: true });

    // convert the epoch-millisecond values back to Date objects for the response
    myUser.updatedAt = new Date(myUser.updatedAt);
    myUser.createdAt = new Date(myUser.createdAt);

    res.json({
      success: true,
      message: "User saved successfully",
      data: myUser,
    });
  } catch (exp) {
    res.json({ success: false, message: exp.message });
  }
});

module.exports = router;
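
Note that the PUT handler above replaces the whole item because of overwrite: true. If you only need to change a couple of attributes, Dynamoose also exposes Model.update, which writes just the supplied fields; a minimal sketch, assuming the same User model (the literal values are hypothetical):

// partial update: only age is written, other attributes stay untouched
let updated = await User.update({ userId: "some-user-id" }, { age: 44 });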

Changes required in app.js

var demoRouter = require('./routes/demo');
&
app.use('/demo', demoRouter);

Now let's install the required npm packages.

Please check the packages to install from the package.json below:

{
  "name": "dynamoose-express",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www"
  },
  "dependencies": {
    "cookie-parser": "~1.4.4",
    "debug": "~2.6.9",
    "dotenv": "^16.4.5",
    "dynamoose": "^4.0.1",
    "ejs": "~2.6.1",
    "express": "^4.19.2",
    "http-errors": "~1.6.3",
    "morgan": "~1.9.1",
    "uuid": "^9.0.1"
  }
}

Now add connection.js at the root:

const dynamoose = require('dynamoose');

dynamoose.aws.ddb.local("http://localhost:8000");
module.exports = dynamoose;


Create a models folder and add userModel.js to it:

const dynamoose = require("../connection");
const { v4: uuidv4 } = require('uuid');

const UserSchema = new dynamoose.Schema(
  {
    userId: {
      type: String,
      hashKey: true,
      // use a function so a fresh uuid is generated per item,
      // not a single uuid computed once when the schema is defined
      default: () => uuidv4(),
    },
    email: {
      type: String,
      index: {
        name: "EmailIndex",
        global: true,
        rangeKey: "userId",
      },
    },
    name: {
      type: String,
    },
    age: Number,
    createdAt: {
      type: Date,
      default: Date.now,
    },
    updatedAt: {
      type: Date,
      default: null,
    },
  },
  {
    throughput: "ON_DEMAND", // or { read: 5, write: 5 }
  }
);

const User = dynamoose.model("User", UserSchema);
module.exports = User;

My .env file is as below

AWS_ACCESS_KEY_ID = "fakeMyKeyId"
AWS_SECRET_ACCESS_KEY = "fakeSecretAccessKey"
AWS_REGION = "fakeRegion"
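
Note that dotenv only populates process.env if it is loaded before anything reads these variables; a one-line sketch, assuming it goes at the very top of app.js:

require('dotenv').config();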

You can run npm install in the meantime if the npm packages are not yet installed.

Now, to check the REST endpoints we created, run npm start.

Endpoints:

For creating a user:

curl --location 'http://localhost:3000/demo/createUser' \
--header 'Content-Type: application/json' \
--data-raw '{
"name":"Vijay Desai",
"email":"Vijay@gmail.com",
"age":24
}'

Output:
{
  "success": true,
  "message": "User created successfully",
  "data": {
    "name": "Vijay Desai",
    "email": "Vijay@gmail.com",
    "age": 24,
    "userId": "fda3f7e2-c835-45ff-853c-b93f8b26cb93",
    "createdAt": "2024-05-04T13:28:45.554Z"
  }
}

For updating an existing user:

curl --location --request PUT 'http://localhost:3000/demo/2' \
--header 'Content-Type: application/json' \
--data-raw '{
"name": "Sangram Desai",
"email": "sangram@gmail.com",
"age": 43
}'

Output:
{
  "success": true,
  "message": "User saved successfully",
  "data": {
    "name": "Sangram Desai",
    "email": "sangram@gmail.com",
    "age": 43,
    "userId": "2",
    "updatedAt": "2024-05-04T13:31:30.480Z",
    "createdAt": "2024-05-04T13:31:30.483Z"
  }
}

For viewing the list of all users:
curl --location 'http://localhost:3000/demo/getAll'

For getting a user by its userId:

curl --location 'http://localhost:3000/demo/getByUserId/fda3f7e2-c835-45ff-853c-b93f8b26cb93'

There are some other endpoints that you can explore.

The complete code of this project is available at
https://github.com/gitsangramdesai/dynamoose-express.

In DynamoDB there is no built-in functionality similar to an auto-increment id in MySQL, so people usually use UUIDs for primary keys. A primary key in DynamoDB is made up of a partition key & a sort key. Records with the same partition key are stored in one partition & sorted by the sort key (range key). You can also define a primary key without a sort key (range key).
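
For illustration, here is a minimal sketch of a Dynamoose schema with such a composite primary key; the Order model and its fields are hypothetical:

const dynamoose = require("dynamoose");

// partition key + sort key together form the primary key
const OrderSchema = new dynamoose.Schema({
  customerId: {
    type: String,
    hashKey: true, // partition key: all orders of one customer land in one partition
  },
  orderDate: {
    type: String,
    rangeKey: true, // sort key: within the partition, items are sorted by this value
  },
  total: Number,
});

module.exports = dynamoose.model("Order", OrderSchema);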

AWS DynamoDB: How to install locally on Ubuntu?

It is possible to install DynamoDB locally; let's explore how.

First, download the archive containing the jar file:

wget https://d1ni2b6xgvw0s0.cloudfront.net/v2.x/dynamodb_local_latest.tar.gz

Now extract the archive.
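
A minimal sketch of the extract step, assuming GNU tar and that you are in the download directory:

tar -xzf dynamodb_local_latest.tar.gz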

Rename the extracted folder to dynamodb.

Copy it to the location where you want to install the binary:

mv dynamodb /usr/share/

Now, on the command line, go to the folder to which dynamodb was copied,
in this case /usr/share/dynamodb:

cd /usr/share/dynamodb

Test if it runs from the terminal by running this:

sudo java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb

As it stands, each time you want the server you would have to run this command manually.
To keep it running without starting it manually, we need a service.

To create a service:

sudo nano /etc/systemd/system/dynamodb.service

add the following to it:

[Unit]
Description=DynamoDB Service
[Service]
User=root
WorkingDirectory=/usr/share/dynamodb
ExecStart=/usr/share/dynamodb/dynamodb.sh
SuccessExitStatus=143
TimeoutStopSec=10
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target

save & exit


Now we will create a shell script file referred in service above.

cd /usr/share/dynamodb
nano dynamodb.sh

add

#!/bin/sh
sudo java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb


save and exit.

Make the shell script executable:

chmod u+x dynamodb.sh

Now let the system know that we have created a new service:

sudo systemctl daemon-reload
sudo systemctl enable dynamodb
sudo systemctl start dynamodb
sudo systemctl status dynamodb


The output of the last command in my case is as below:

● dynamodb.service - Dynamo DB Local Service
Loaded: loaded (/etc/systemd/system/dynamodb.service; enabled; vendor preset: enabled)
Active: active (running) since Sat 2024-05-04 11:13:52 IST; 11min ago
Main PID: 33499 (dynamodb.sh)
Tasks: 41 (limit: 18708)
Memory: 150.8M
CPU: 4.333s
CGroup: /system.slice/dynamodb.service
├─33499 /bin/sh /usr/share/dynamodb/dynamodb.sh
├─33500 sudo java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
└─33501 java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb

May 04 11:13:52 sangram-Inspiron-14-5430 sudo[33500]: root : PWD=/usr/share/dynamodb ; USER=root ; COMMAND=/usr/bin/java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
May 04 11:13:52 sangram-Inspiron-14-5430 sudo[33500]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: Initializing DynamoDB Local with the following configuration:
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: Port: 8000
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: InMemory: false
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: Version: 2.4.0
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: DbPath: null
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: SharedDb: true
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: shouldDelayTransientStatuses: false
May 04 11:13:53 sangram-Inspiron-14-5430 dynamodb.sh[33501]: CorsParams: null


Now we need to install awscli on Ubuntu as follows:

curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip"
-o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install


Now run aws configure
and give the responses below when prompted.

AWS Access Key ID [****************yId"]: fakeMyKeyId
AWS Secret Access Key [****************Key"]: fakeSecretAccessKey
Default region name ["fakeRegion"]: fakeRegion
Default output format [None]:


The responses need to be given exactly as shown above.


Now you can check the list of tables in DynamoDB by running the following command:

aws dynamodb list-tables --endpoint-url http://localhost:8000
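
Once the Express app has created the User table, you can inspect it the same way; two illustrative commands, assuming the table is named User:

aws dynamodb describe-table --table-name User --endpoint-url http://localhost:8000
aws dynamodb scan --table-name User --endpoint-url http://localhost:8000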

You may also like to check how to do CRUD operations in DynamoDB using Express:
https://msdotnetbuddy.blogspot.com/2023/05/working-with-dynamo-db.html.

References:
https://medium.com/aws-lambda-serverless-developer-guide-with-hands/amazon-dynamodb-primary-key-partition-key-and-sort-key-how-to-choose-right-key-for-dynamodb-ea5673cb87c0

2024/04/30

Sequelize-Postgres-Express: Upload file to database

Today we will explore how to upload a file (an image) into a Postgres database using Sequelize.

Let's first create an Express application using the express generator.

express --view=ejs express-postgres-upload-file

Here is my package.json, which you can check to install the required npm
packages:

{
  "name": "express-postgres-upload-file",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www"
  },
  "dependencies": {
    "cookie-parser": "~1.4.4",
    "debug": "~2.6.9",
    "dotenv": "^16.4.5",
    "ejs": "~2.6.1",
    "express": "~4.16.1",
    "http-errors": "~1.6.3",
    "morgan": "~1.9.1",
    "multer": "^1.4.5-lts.1",
    "pg": "^8.11.5",
    "sequelize": "^6.37.3"
  }
}

Here we are installing the multer, dotenv, sequelize, and pg packages.

Run npm i.

Create a .env file in the root folder with this content:

PG_USER=sangram
PG_PASSWORD="sangram#81"
PG_PORT=5432
PG_DATABASE=playground
PG_SERVER=localhost
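
The models/index.js shown below requires a connection.js at the project root, which this post does not list; a minimal sketch of what it could look like, assuming the PG_* variables from the .env file above:

require('dotenv').config();
const { Sequelize } = require('sequelize');

// build a Sequelize connection from the .env values
const sequelize = new Sequelize(
  process.env.PG_DATABASE,
  process.env.PG_USER,
  process.env.PG_PASSWORD,
  {
    host: process.env.PG_SERVER,
    port: process.env.PG_PORT,
    dialect: 'postgres',
  }
);

module.exports = { sequelize, Sequelize };

The image table also has to exist before the first insert; calling sequelize.sync() once at startup is one way to create it (an assumption; the author's repository may handle this differently).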


Now add upload.js in the root folder with the following content.

var multer = require("multer");

var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, "./public/uploads/profile_pic/");
  },
  filename: function (req, file, cb) {
    var fileparts = file.originalname.split(".");
    var ext = fileparts[fileparts.length - 1];
    cb(null, file.fieldname + "-" + Date.now() + "." + ext);
  },
});

var upload = multer({ storage: storage });

module.exports = upload;

Create a models folder in the root location & add image.js to it.

module.exports = function (sequelize, DataTypes) {
  const Image = sequelize.define('image', {
    imageId: {
      type: DataTypes.INTEGER,
      autoIncrement: true,
      primaryKey: true
    },
    mimeType: {
      type: DataTypes.STRING,
    },
    fileName: {
      type: DataTypes.STRING,
      field: 'name'
    },
    data: {
      type: DataTypes.BLOB("long"),
    }
  }, {
    freezeTableName: true
  });

  return Image;
}

Now create index.js inside the models folder with the following content:

let { sequelize, Sequelize } = require("../connection.js");

let db = {};
// pass the connection first and the Sequelize class (which carries the DataTypes)
// second, matching the factory signature in image.js
db.Images = require("./image.js")(sequelize, Sequelize);

db.Sequelize = Sequelize;
db.sequelize = sequelize;
module.exports = db;

Now add an uploads folder inside the public folder, & inside the uploads folder
add a profile_pic folder.

Now create demo.js inside the routes folder with the following content.

var express = require("express");
var router = express.Router();
var db = require("../models");
var upload = require("../upload");
var fs = require("fs");
var path = require("path");

router.post("/", upload.single("image"), async (req, res, next) => {
  try {
    let imageCreated = await db.Images.create({
      mimeType: req.file.mimetype,
      fileName: req.file.filename,
      data: fs.readFileSync(
        path.join("./public/uploads/profile_pic/" + req.file.filename)
      ),
    });
    res.json({
      success: true,
      message: "File Uploaded to Postgres Successfully",
      data: imageCreated,
    });
  } catch (exp) {
    res.json({ success: false, message: exp.message.toString() });
  }
});

router.get("/:fileId", async (req, res) => {
  try {
    var imageFound = await db.Images.findOne({
      where: { imageId: req.params.fileId },
    });
    var buffer = imageFound.data;
    var mimeType = imageFound.mimeType;

    res.contentType(mimeType);
    res.send(buffer);
  } catch (exp) {
    res.json({ success: false, message: exp.message.toString() });
  }
});

module.exports = router;

Inside app.js add

var demoRouter = require('./routes/demo');

and

app.use('/demo', demoRouter);

in a suitable location.

Now we are ready to run our application. Usually testing can be done using
Postman.

For uploading an image:

curl --location 'http://localhost:3000/demo' \
--form 'image=@"/home/sangram/Pictures/Photo.jpg"'

Output:
{
  "success": true,
  "message": "File Uploaded to Postgres Successfully",
  "data": {
    "imageId": 1,
    "mimeType": "image/jpeg",
    "fileName": "image-1714485923005.jpg",
    "data": {contain binary data},
    "updatedAt": "2024-04-30T13:07:35.512Z",
    "createdAt": "2024-04-30T13:07:35.512Z"
  }
}

Please note the imageId, which we are going to use in the next API call.

curl --location 'http://localhost:3000/demo/1'

This will output image uploaded in previous step.

The complete code of this application can be found at

https://github.com/gitsangramdesai/express-sequelize-pg-upload-file

Sequelize-MySQL-Express: Upload file to database

Today we will explore how to upload a file (an image) into a MySQL database using Sequelize.

Let's first create an Express application using the express generator.

express --view=ejs express-mysql-upload-file

Here is my package.json, which you can check to install the required npm
packages:

{
  "name": "express-mysql-upload-file",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www"
  },
  "dependencies": {
    "cookie-parser": "~1.4.4",
    "debug": "~2.6.9",
    "dotenv": "^16.4.5",
    "ejs": "~2.6.1",
    "express": "~4.16.1",
    "http-errors": "~1.6.3",
    "morgan": "~1.9.1",
    "multer": "^1.4.5-lts.1",
    "mysql2": "^3.9.7",
    "sequelize": "^6.37.3"
  }
}

Here we are installing the multer, dotenv, sequelize, and mysql2 packages.

Run npm i.

Create a .env file in the root folder with this content:

MYSQL_USER=root
MYSQL_PASSWORD="sangram#81"
MYSQL_PORT=3306
MYSQL_DATABASE=playground
MYSQL_SERVER=localhost
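
As in the Postgres post above, the models/index.js below requires a connection.js at the project root, which this post does not list; a minimal sketch of what it could look like, assuming the MYSQL_* variables from the .env file above:

require('dotenv').config();
const { Sequelize } = require('sequelize');

// build a Sequelize connection from the .env values
const sequelize = new Sequelize(
  process.env.MYSQL_DATABASE,
  process.env.MYSQL_USER,
  process.env.MYSQL_PASSWORD,
  {
    host: process.env.MYSQL_SERVER,
    port: process.env.MYSQL_PORT,
    dialect: 'mysql',
  }
);

module.exports = { sequelize, Sequelize };

Again, the image table has to exist before the first insert; calling sequelize.sync() once at startup is one way to create it (an assumption; the author's repository may handle this differently).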


Now add upload.js in the root folder with the following content.

var multer = require("multer");

var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, "./public/uploads/profile_pic/");
  },
  filename: function (req, file, cb) {
    var fileparts = file.originalname.split(".");
    var ext = fileparts[fileparts.length - 1];
    cb(null, file.fieldname + "-" + Date.now() + "." + ext);
  },
});

var upload = multer({ storage: storage });

module.exports = upload;

Create a models folder in the root location & add image.js to it.

module.exports = function (sequelize, DataTypes) {
  const Image = sequelize.define('image', {
    imageId: {
      type: DataTypes.INTEGER,
      autoIncrement: true,
      primaryKey: true
    },
    mimeType: {
      type: DataTypes.STRING,
    },
    fileName: {
      type: DataTypes.STRING,
      field: 'name'
    },
    data: {
      type: DataTypes.BLOB("long"),
    }
  }, {
    freezeTableName: true
  });

  return Image;
}

Now create index.js inside the models folder with the following content:

let { sequelize, Sequelize } = require("../connection.js");

let db = {};
// pass the connection first and the Sequelize class (which carries the DataTypes)
// second, matching the factory signature in image.js
db.Images = require("./image.js")(sequelize, Sequelize);

db.Sequelize = Sequelize;
db.sequelize = sequelize;
module.exports = db;

Now add an uploads folder inside the public folder, & inside the uploads folder
add a profile_pic folder.

Now create demo.js inside the routes folder with the following content.

var express = require("express");
var router = express.Router();
var db = require("../models");
var upload = require("../upload");
var fs = require("fs");
var path = require("path");

router.post("/", upload.single("image"), async (req, res, next) => {
  try {
    let imageCreated = await db.Images.create({
      mimeType: req.file.mimetype,
      fileName: req.file.filename,
      data: fs.readFileSync(
        path.join("./public/uploads/profile_pic/" + req.file.filename)
      ),
    });
    res.json({
      success: true,
      message: "File Uploaded to MySQL Successfully",
      data: imageCreated,
    });
  } catch (exp) {
    res.json({ success: false, message: exp.message.toString() });
  }
});

router.get("/:fileId", async (req, res) => {
  try {
    var imageFound = await db.Images.findOne({
      where: { imageId: req.params.fileId },
    });
    var buffer = imageFound.data;
    var mimeType = imageFound.mimeType;

    res.contentType(mimeType);
    res.send(buffer);
  } catch (exp) {
    res.json({ success: false, message: exp.message.toString() });
  }
});

module.exports = router;

Inside app.js add

var demoRouter = require('./routes/demo');

and

app.use('/demo', demoRouter);

in a suitable location.

Now we are ready to run our application. Usually testing can be done using
Postman.

For uploading an image:

curl --location 'http://localhost:3000/demo' \
--form 'image=@"/home/sangram/Pictures/Photo.jpg"'

Output:
{
  "success": true,
  "message": "File Uploaded to MySQL Successfully",
  "data": {
    "imageId": 1,
    "mimeType": "image/jpeg",
    "fileName": "image-1714482455509.jpg",
    "data": {contain binary data},
    "updatedAt": "2024-04-30T13:07:35.512Z",
    "createdAt": "2024-04-30T13:07:35.512Z"
  }
}

Please note the imageId, which we are going to use in the next API call.

curl --location 'http://localhost:3000/demo/1'

This will output image uploaded in previous step.

The complete code of this application can be found at

https://github.com/gitsangramdesai/express-sequelize-mysql-upload-file

MongoDb BSON: File Upload & Download in Express.js using Mongoose & Multer

We know that a file can be saved as binary data in different DBMS systems like
MySQL, MSSQL, or Oracle. Today we will explore how a file can be uploaded to MongoDB.
There are two ways: one is embedding the file in a regular document as binary BSON
data, in which case the document size needs to be less than 16 MB; the other is
GridFS, covered in the next post below.

BSON is a binary serialization format used to store documents and make remote
procedure calls in MongoDB.


Let's first create an Express application using the express generator.

express --view=ejs express-mongoose-bson


Now install the required dependencies.

My package.json looks like:

{
  "name": "express-mongoose-bson",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www"
  },
  "dependencies": {
    "cookie-parser": "~1.4.4",
    "debug": "~2.6.9",
    "dotenv": "^16.4.5",
    "ejs": "~2.6.1",
    "express": "~4.16.1",
    "http-errors": "~1.6.3",
    "mongoose": "^8.3.3",
    "morgan": "~1.9.1",
    "multer": "^1.4.5-lts.1"
  }
}


Here we are installing the multer, dotenv, and mongoose packages.

Run npm i.

Create a .env file in the root folder with this content:

MONGO_URI = mongodb://sangram:sangram%2381@127.0.0.1:27017/phenixDb?directConnection=true&serverSelectionTimeoutMS=2000&authSource=admin&appName=mongosh+2.2.3


Now add upload.js in the root folder.

Content of upload.js:

var multer = require("multer");

var storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, './public/uploads/profile_pic/');
  },
  filename: function (req, file, cb) {
    var fileparts = file.originalname.split(".");
    var ext = fileparts[fileparts.length - 1];
    cb(null, file.fieldname + "-" + Date.now() + "." + ext);
  }
});

var upload = multer({ storage: storage });

module.exports = upload;

Also add connection.js in the root folder with the following content.

var mongoose = require("mongoose");
mongoose.connect(process.env.MONGO_URI, {});

module.exports = mongoose;
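
For process.env.MONGO_URI to be populated here, dotenv (present in the package.json above) has to run before this module is required; a one-line sketch, assuming it sits at the very top of app.js:

require('dotenv').config();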


Create a models folder in the root location & add image.js to it.

Here is the content of image.js:

var mongoose = require("mongoose");
var connection = require("../connection");

var imageSchema = new mongoose.Schema({
  name: String,
  desc: String,
  filename: String,
  img: {
    data: Buffer,
    contentType: String,
  },
});

var imageModel = mongoose.model("Image", imageSchema);

module.exports = {
  ImageModel: imageModel,
  ImageSchema: imageSchema,
};


Now add an uploads folder inside the public folder, & inside the uploads folder
add a profile_pic folder.

Now create demo.js inside the routes folder with the following content.

var express = require("express");
var router = express.Router();
var { ImageModel } = require("../models/image");
var upload = require("../upload");
var fs = require("fs");
var path = require("path");
var mongoose = require("mongoose");

router.post("/", upload.single("image"), async (req, res, next) => {
  var obj = {
    name: req.body.name,
    desc: req.body.desc,
    filename: req.file.filename,
    img: {
      data: fs.readFileSync(
        path.join("./public/uploads/profile_pic/" + req.file.filename)
      ),
      // store the uploaded file's actual mime type instead of hard-coding image/png
      contentType: req.file.mimetype,
    },
  };

  try {
    var item = await ImageModel.create(obj);
    res.json({ item: item });
  } catch (exp) {
    console.log(exp.message.toString());
    res.status(500).send({ msg: "An error occurred", error: exp });
  }
});

//get image by its _id
router.get("/:fileId", async (req, res) => {
  try {
    var result = await ImageModel.find(
      { _id: new mongoose.Types.ObjectId(req.params.fileId) },
      {}
    );
    var fileName = result[0].filename;
    var mimeType = result[0].img.contentType;
    var buffer = result[0].img.data;

    res.contentType(mimeType);
    res.send(buffer);
  } catch (err) {
    res
      .status(500)
      .send({ msg: "An error occurred", error: err.message.toString() });
  }
});

module.exports = router;

Inside app.js add

var demoRouter = require('./routes/demo');

and

app.use('/demo', demoRouter);

in a suitable location.

Now we are ready to run our application. Usually testing can be done using
Postman.



For uploading an image, here is the curl for that:
curl --location 'localhost:3000/demo' \
--form 'name="sangram"' \
--form 'desc="sangram desai"' \
--form 'image=@"/home/sangram/Pictures/Photo.jpg"'

Output:
{
  "item": {
    "name": "sangram",
    "desc": "sangram desai",
    "filename": "image-1714460945726.jpg",
    "img": {
      "data": {
        "type": "Buffer",
        "data": [some data here truncated]
      }
    },
    "_id": "6630991140528fe1e6bfa8d4",
    "__v": 0
  }
}

Downloading File:

curl --location 'http://localhost:3000/demo/6630991140528fe1e6bfa8d4'

Here we are passing the _id of the record created in the previous curl call.

The complete code of this application can be found at

https://github.com/gitsangramdesai/express-mongoose-bson.

2024/04/29

MongoDb: GridFS uploading a file to MongoDB using multer

We know that it is possible to upload a file into a DBMS like MySQL or MSSQL.
Today we will explore how to upload a file to MongoDB using GridFS.

First create an Express application as follows:

express --view=ejs express-mongoose-gridfs

Run

npm i

to install the packages.

Now we need crypto, mongodb, mongoose, & multer-gridfs-storage; install them one by one.

Here is my package.json

{
  "name": "express-mongoose-gridfs",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www"
  },
  "dependencies": {
    "cookie-parser": "~1.4.4",
    "crypto": "^1.0.1",
    "debug": "~2.6.9",
    "ejs": "~2.6.1",
    "express": "~4.16.1",
    "http-errors": "~1.6.3",
    "mongodb": "^5.9.1",
    "mongoose": "^8.3.2",
    "morgan": "~1.9.1",
    "multer-gridfs-storage": "^5.0.2"
  }
}

Note that "mongodb": "^5.9.1" is essential; with a higher driver version the code failed.

Create connection.js at the root of the project with the following content:

let mongoose = require("mongoose");
let Schema = mongoose.Schema;
const GridFsStorage = require("multer-gridfs-storage");

const mongoURI =
  "mongodb://sangram:sangram%2381@127.0.0.1:27017/phenixDb?directConnection=true&serverSelectionTimeoutMS=2000&authSource=admin&appName=mongosh+2.2.3";

const conn = mongoose.createConnection(mongoURI, { maxPoolSize: 10 });

//initialize gridfs
let bucket = new mongoose.mongo.GridFSBucket(conn, {
  bucketName: "uploads",
});

module.exports = {
  mongoose: mongoose,
  connection: conn,
  Schema: Schema,
  mongoURI: mongoURI,
  bucket: bucket,
};


Create upload.js in the root of the project with the following content.

const multer = require("multer");
const { GridFsStorage } = require("multer-gridfs-storage");
const crypto = require("crypto");
const { mongoURI } = require("./connection");
const path = require("path");

const storage = new GridFsStorage({
  url: mongoURI,
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      // generate a random hex filename, keeping the original extension
      crypto.randomBytes(16, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString("hex") + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: "uploads",
        };
        resolve(fileInfo);
      });
    });
  },
});

const fileFilter = (req, file, cb) => {
  if (file.fieldname == "profilePic") {
    if (
      file.mimetype.includes("jpeg") ||
      file.mimetype.includes("png") ||
      file.mimetype.includes("jpg")
    ) {
      cb(null, true);
    } else {
      cb(null, false);
    }
  } else if (file.fieldname == "resume") {
    if (
      file.mimetype.includes("doc") ||
      file.mimetype.includes("openxmlformats")
    ) {
      cb(null, true);
    } else {
      cb(null, false);
    }
  } else {
    // reject any other field so multer's callback is always invoked
    cb(null, false);
  }
};

var upload = multer({
  storage: storage,
  fileFilter: fileFilter,
  limits: { fileSize: 1 * 1024 * 1024 },
});

module.exports = {
  upload: upload,
  storage: storage,
};

Create foodItem.js in a models folder at the root of the project with the following
content:

const { mongoose, Schema, connection } = require("../connection");

let foodItemSchema = new Schema({
  name: String,
  price: Number,
  description: String,
  picture: String,
  createdOn: { type: Date, default: Date.now },
  isDeleted: { type: Boolean, default: false },
});

//the last argument is the actual collection name in the db and it is essential
let FoodItem = connection.model("FoodItem", foodItemSchema, "foodItem");

module.exports = {
  FoodItem: FoodItem,
  FoodItemSchema: foodItemSchema,
};


Create demo.js in the routes folder with the following content.

let express = require("express");
let router = express.Router();
let FoodItem = require("../models/foodItem").FoodItem;
let FoodItemSchema = require("../models/foodItem").FoodItemSchema;
let mongoose = require("mongoose");
let { upload } = require("../upload");
const path = require("path");
const fs = require("fs");
let { bucket } = require("../connection");

/* insert a new food item */
router.post("/insert", upload.single("resume"), async function (req, res, next) {
  let foodItemName = req.body.name;
  let foodItemDesc = req.body.description;
  let foodItemPrice = req.body.price;

  try {
    let foodItem = await FoodItem.create({
      name: foodItemName,
      description: foodItemDesc,
      price: foodItemPrice,
      picture: req.file.filename,
    });
    res.json({
      message: "food item added successfully",
      error: "",
      data: foodItem,
      success: true,
    });
  } catch (err) {
    res.json({
      message: "unable to save food items",
      error: err.message.toString(),
      data: [],
      success: false,
    });
  }
});

/* stream a stored file back by its GridFS filename */
router.get("/image/:filename", async (req, res) => {
  let foods = await FoodItem.find({ picture: req.params.filename });
  if (foods.length) {
    bucket.openDownloadStreamByName(req.params.filename).pipe(res);
  } else {
    res.json({ msg: "no file found" });
  }
});

module.exports = router;


inside app.js

add
var demoRouter = require('./routes/demo');

and
app.use('/demo', demoRouter);

at a suitable place.

You can start your application using

npm start

Now you can test your application using Postman.

Testing Insert API:

curl --location 'http://localhost:3000/demo/insert' \
--form 'name=" Anda Burji\""' \
--form 'description=" made up of smashed eggs"' \
--form 'price=" 40"' \
--form 'resume=@"/home/sangram/Downloads/Cv-Sangram.docx"'

Output:
{
"message": "food item added successfully",
"error": "",
"data": {
"name": " Anda Burji\"",
"price": 40,
"description": " made up of smashed eggs",
"picture": "2dc4d730d659e9cd14d6c0506ada7ef9.docx",
"isDeleted": false,
"_id": "662f6c080fd2bdd1e18990bf",
"createdOn": "2024-04-29T09:44:40.222Z",
"__v": 0
},
"success": true
}

Please note the picture value; in this case the upload is not an image but a document file.

Testing Download API:
Here we are using the value of picture noted above; this should download the file.

curl --location 'http://localhost:3000/demo/image/2dc4d730d659e9cd14d6c0506ada7ef9.docx'


Code of this application is available at
https://github.com/gitsangramdesai/express-mongoose-gridfs.

2024/04/28

Javascript: Validate JSON against schema

As we know, given an XML document we can validate that it follows the format we
are looking for using an XSD. Similarly, there is a need for validation of JSON:
we can check that a given string is valid JSON using the JSON.parse() method, but
that does not let us check whether the JSON conforms to our own format. The
library called ajv does precisely that.

Code 1:
const Ajv = require("ajv")
const ajv = new Ajv()

const schema = {
  type: "object",
  properties: {
    foo: {type: "integer"},
    bar: {type: "string"}
  },
  required: ["foo"],
  additionalProperties: false
}
const validate = ajv.compile(schema)

const data = {
  foo: 1,
  bar: "abc",
  add: 23
}

const valid = validate(data)
if (!valid) {
  console.log(validate.errors)
} else {
  console.log("Valid JSON")
}

Output:
[
  {
    instancePath: '',
    schemaPath: '#/additionalProperties',
    keyword: 'additionalProperties',
    params: { additionalProperty: 'add' },
    message: 'must NOT have additional properties'
  }
]

Here our JSON has an additional property called add,
so it is not valid with respect to our schema.


The same library can parse JSON, doing the same work as
JSON.parse, & it can also serialize a JSON object into a string.

Code 2:

const Ajv = require("ajv/dist/jtd")
const ajv = new Ajv() // options can be passed, e.g. {allErrors: true}

const schema = {
  properties: {
    foo: {type: "int32"}
  },
  optionalProperties: {
    bar: {type: "string"}
  }
}

const serialize = ajv.compileSerializer(schema)

const data = {
  foo: 1,
  bar: "abc"
}

console.log("Serializer:", serialize(data))

const parse = ajv.compileParser(schema)

const json = '{"foo": 1, "bar": "abc"}'
const invalidJson = '{"unknown": "abc"}'

parseAndLog(json) // logs {foo: 1, bar: "abc"}
parseAndLog(invalidJson) // logs error and position

function parseAndLog(json) {
  const data = parse(json)
  if (data === undefined) {
    console.log("Parsing Failed:")
    console.log(parse.message) // error message from the last parse call
    console.log(parse.position) // error position in string
  } else {
    console.log("Parsing Success:")
    console.log(data)
  }
}

Output:
Serializer: {"foo":1,"bar":"abc"}
Parsing Success:
{ foo: 1, bar: 'abc' }
Parsing Failed:
property unknown not allowed
11

2024/04/25

MongoDb: $facet


 $facet:

Processes multiple aggregation pipelines within a single stage on the
same set of input documents. Each sub-pipeline has its own field in the
output document where its results are stored as an array of documents.

The $facet stage allows you to create multi-faceted aggregations which
characterize data across multiple dimensions, or facets, within a single
aggregation stage. Multi-faceted aggregations provide multiple filters and
categorizations to guide data browsing and analysis. Retailers commonly use
faceting to narrow search results by creating filters on product price,
manufacturer, size, etc.

Input documents are passed to the $facet stage only once. $facet enables
various aggregations on the same set of input documents, without needing to
retrieve the input documents multiple times.

We will use a teacher collection for understanding $facet.

//drop teacher collection if it pre-exists
db.teacher.drop()

//add some sample data to the teacher collection
db.teacher.insertMany([
  { name: "sagar desai", email: "sagar.desai@example.com", age: 30, gender: "male" },
  { name: "sachin sawant", email: "sagar.sawant@example.com", age: 30, gender: "male" },
  { name: "swara desai", email: "swara.desai@example.com", age: 20, gender: "female" },
  { name: "mrunmayi sawant", email: "mrunmayi.sawant@example.com", age: 30, gender: "female" },
  { name: "sandesh raorane", email: "sandesh.raorane@example.com", age: 45, gender: "male" },
  { name: "Gaurav raorane", email: "gaurav.raorane@example.com", age: 42, gender: "male" },
  { name: "vinayak tambe", email: "vinayak.tambe@example.com", age: 21, gender: "male" },
  { name: "kiran rao", email: "kiran.rao@example.com", age: 21, gender: "trans-gender" }
])


Let's first run the following command:

db.teacher.aggregate([
  {
    $bucket: {
      groupBy: "$age",
      boundaries: [0, 30, 40],
      default: "GreaterThan40",
      output: {
        count: { "$sum": 1 }
      }
    }
  }
])

Output:
[
{ _id: 0, count: 3 },
{ _id: 30, count: 3 },
{ _id: 'GreaterThan40', count: 2 }
]

db.teacher.aggregate([
  {
    $group: {
      _id: "$gender",
      count: { "$sum": 1 }
    }
  }
])
Output:
[
{ _id: 'female', count: 2 },
{ _id: 'trans-gender', count: 1 },
{ _id: 'male', count: 5 }
]


The output of the last two queries can be obtained in a single query using $facet.
Here is how:


db.teacher.aggregate([
  {
    $facet: {
      "facetByAge": [
        {
          $bucket: {
            groupBy: "$age",
            boundaries: [0, 30, 40],
            default: "GreaterThan40",
            output: {
              count: { "$sum": 1 }
            }
          }
        }
      ],
      "faceByGender": [
        {
          $group: {
            _id: "$gender",
            count: { "$sum": 1 }
          }
        }
      ]
    }
  }
])

Output:
[
  {
    facetByAge: [
      { _id: 0, count: 3 },
      { _id: 30, count: 3 },
      { _id: 'GreaterThan40', count: 2 }
    ],
    faceByGender: [
      { _id: 'male', count: 5 },
      { _id: 'female', count: 2 },
      { _id: 'trans-gender', count: 1 }
    ]
  }
]

We can get all the documents grouped by gender & age with
just a small modification to our previous query.

db.teacher.aggregate([
  {
    $facet: {
      "facetByAge": [
        {
          $bucket: {
            groupBy: "$age",
            boundaries: [0, 30, 40],
            default: "GreaterThan40",
            output: {
              count: { "$sum": 1 },
              teachers: { $push: { "email": "$email", "age": "$age", name: "$name", gender: "$gender" } }
            }
          }
        },
        {
          $project: {
            _id: 1,
            count: 1,
            teachers: 1,
          }
        }
      ],
      "faceByGender": [
        {
          $group: {
            _id: "$gender",
            count: { "$sum": 1 },
            teachers: { $push: { "email": "$email", "age": "$age", name: "$name", gender: "$gender" } }
          }
        }
      ]
    }
  }
])

Output:
[
{
facetByAge: [
{
_id: 0,
count: 3,
teachers: [
{
email: 'swara.desai@example.com',
age: 20,
name: 'swara desai',
gender: 'female'
},
{
email: 'vinayak.tambe@example.com',
age: 21,
name: 'vinayak tambe',
gender: 'male'
},
{
email: 'kiran.rao@example.com',
age: 21,
name: 'kiran rao',
gender: 'trans-gender'
}
]
},
{
_id: 30,
count: 3,
teachers: [
{
email: 'sagar.desai@example.com',
age: 30,
name: 'sagar desai',
gender: 'male'
},
{
email: 'sagar.sawant@example.com',
age: 30,
name: 'sachin sawant',
gender: 'male'
},
{
email: 'mrunmayi.sawant@example.com',
age: 30,
name: 'mrunmayi sawant',
gender: 'female'
}
]
},
{
_id: 'GreaterThan40',
count: 2,
teachers: [
{
email: 'sandesh.raorane@example.com',
age: 45,
name: 'sandesh raorane',
gender: 'male'
},
{
email: 'gaurav.raorane@example.com',
age: 42,
name: 'Gaurav raorane',
gender: 'male'
}
]
}
],
faceByGender: [
{
_id: 'male',
count: 5,
teachers: [
{
email: 'sagar.desai@example.com',
age: 30,
name: 'sagar desai',
gender: 'male'
},
{
email: 'sagar.sawant@example.com',
age: 30,
name: 'sachin sawant',
gender: 'male'
},
{
email: 'sandesh.raorane@example.com',
age: 45,
name: 'sandesh raorane',
gender: 'male'
},
{
email: 'gaurav.raorane@example.com',
age: 42,
name: 'Gaurav raorane',
gender: 'male'
},
{
email: 'vinayak.tambe@example.com',
age: 21,
name: 'vinayak tambe',
gender: 'male'
}
]
},
{
_id: 'female',
count: 2,
teachers: [
{
email: 'swara.desai@example.com',
age: 20,
name: 'swara desai',
gender: 'female'
},
{
email: 'mrunmayi.sawant@example.com',
age: 30,
name: 'mrunmayi sawant',
gender: 'female'
}
]
},
{
_id: 'trans-gender',
count: 1,
teachers: [
{
email: 'kiran.rao@example.com',
age: 21,
name: 'kiran rao',
gender: 'trans-gender'
}
]
}
]
}
]

In this output:

_id = 0 corresponds to 0 <= age < 30
_id = 30 corresponds to 30 <= age < 40
_id = 'GreaterThan40' corresponds to age >= 40

Accordingly, we will use a $switch expression to spell that
out in the result. Below is the modified query:

db.teacher.aggregate([
  {
    $facet: {
      "facetByAge": [
        {
          $bucket: {
            groupBy: "$age",
            boundaries: [0, 30, 40],
            default: "GreaterThan40",
            output: {
              count: { "$sum": 1 },
              teachers: { $push: { "email": "$email", "age": "$age", name: "$name", gender: "$gender" } }
            }
          }
        },
        {
          $project: {
            _id: 1,
            count: 1,
            teachers: 1,
            "AgeGroup": {
              $switch: {
                branches: [
                  {
                    case: { $eq: ["$_id", 0] },
                    then: "age < 30"
                  },
                  {
                    case: { $eq: ["$_id", 30] },
                    then: "30 <= age < 40"
                  }
                ],
                default: "age > 40"
              }
            }
          }
        }
      ],
      "faceByGender": [
        {
          $group: {
            _id: "$gender",
            count: { "$sum": 1 },
            teachers: { $push: { "email": "$email", "age": "$age", name: "$name", gender: "$gender" } }
          }
        }
      ]
    }
  }
])

Output:
[
{
facetByAge: [
{
_id: 0,
count: 3,
teachers: [
{
email: 'swara.desai@example.com',
age: 20,
name: 'swara desai',
gender: 'female'
},
{
email: 'vinayak.tambe@example.com',
age: 21,
name: 'vinayak tambe',
gender: 'male'
},
{
email: 'kiran.rao@example.com',
age: 21,
name: 'kiran rao',
gender: 'trans-gender'
}
],
AgeGroup: 'age < 30'
},
{
_id: 30,
count: 3,
teachers: [
{
email: 'sagar.desai@example.com',
age: 30,
name: 'sagar desai',
gender: 'male'
},
{
email: 'sagar.sawant@example.com',
age: 30,
name: 'sachin sawant',
gender: 'male'
},
{
email: 'mrunmayi.sawant@example.com',
age: 30,
name: 'mrunmayi sawant',
gender: 'female'
}
],
AgeGroup: '30 <= age < 40'
},
{
_id: 'GreaterThan40',
count: 2,
teachers: [
{
email: 'sandesh.raorane@example.com',
age: 45,
name: 'sandesh raorane',
gender: 'male'
},
{
email: 'gaurav.raorane@example.com',
age: 42,
name: 'Gaurav raorane',
gender: 'male'
}
],
AgeGroup: 'age > 40'
}
],
faceByGender: [
{
_id: 'male',
count: 5,
teachers: [
{
email: 'sagar.desai@example.com',
age: 30,
name: 'sagar desai',
gender: 'male'
},
{
email: 'sagar.sawant@example.com',
age: 30,
name: 'sachin sawant',
gender: 'male'
},
{
email: 'sandesh.raorane@example.com',
age: 45,
name: 'sandesh raorane',
gender: 'male'
},
{
email: 'gaurav.raorane@example.com',
age: 42,
name: 'Gaurav raorane',
gender: 'male'
},
{
email: 'vinayak.tambe@example.com',
age: 21,
name: 'vinayak tambe',
gender: 'male'
}
]
},
{
_id: 'female',
count: 2,
teachers: [
{
email: 'swara.desai@example.com',
age: 20,
name: 'swara desai',
gender: 'female'
},
{
email: 'mrunmayi.sawant@example.com',
age: 30,
name: 'mrunmayi sawant',
gender: 'female'
}
]
},
{
_id: 'trans-gender',
count: 1,
teachers: [
{
email: 'kiran.rao@example.com',
age: 21,
name: 'kiran rao',
gender: 'trans-gender'
}
]
}
]
}
]

Usually in an aggregation pipeline the output of the previous stage is the input
to the next stage, but in $facet every sub-pipeline gets the same input.

MongoDb: $sortByCount operator

$sortByCount:
Groups incoming documents based on the value of a specified expression,
then computes the count of documents in each distinct group.

Each output document contains two fields: an _id field containing the
distinct grouping value, and a count field containing the number of
documents belonging to that grouping or category.

The documents are sorted by count in descending order.
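
Per the MongoDB documentation, $sortByCount is equivalent to a $group on the expression followed by a descending $sort on the count. The sketch below, run against the exhibits collection created next, should therefore match the $sortByCount output further down:

db.exhibits.aggregate([
  { $unwind: "$tags" },
  // group by tag and count the documents in each group
  { $group: { _id: "$tags", count: { $sum: 1 } } },
  // order the groups by count, descending
  { $sort: { count: -1 } }
])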

db.exhibits.insertMany([
{ "_id" : 1, "title" : "The Pillars of Society", "artist" : "Grosz", "year" : 1926, "tags" : [ "painting", "satire", "Expressionism", "caricature" ] },
{ "_id" : 2, "title" : "Melancholy III", "artist" : "Munch", "year" : 1902, "tags" : [ "woodcut", "Expressionism" ] },
{ "_id" : 3, "title" : "Dancer", "artist" : "Miro", "year" : 1925, "tags" : [ "oil", "Surrealism", "painting" ] },
{ "_id" : 4, "title" : "The Great Wave off Kanagawa", "artist" : "Hokusai", "tags" : [ "woodblock", "ukiyo-e" ] },
{ "_id" : 5, "title" : "The Persistence of Memory", "artist" : "Dali", "year" : 1931, "tags" : [ "Surrealism", "painting", "oil" ] },
{ "_id" : 6, "title" : "Composition VII", "artist" : "Kandinsky", "year" : 1913, "tags" : [ "oil", "painting", "abstract" ] },
{ "_id" : 7, "title" : "The Scream", "artist" : "Munch", "year" : 1893, "tags" : [ "Expressionism", "painting", "oil" ] },
{ "_id" : 8, "title" : "Blue Flower", "artist" : "O'Keefe", "year" : 1918, "tags" : [ "abstract", "painting" ] }
])


db.exhibits.aggregate([
  { $unwind: "$tags" }
])

Output:
[
{
_id: 1,
title: 'The Pillars of Society',
artist: 'Grosz',
year: 1926,
tags: 'painting'
},
{
_id: 1,
title: 'The Pillars of Society',
artist: 'Grosz',
year: 1926,
tags: 'satire'
},
{
_id: 1,
title: 'The Pillars of Society',
artist: 'Grosz',
year: 1926,
tags: 'Expressionism'
},
{
_id: 1,
title: 'The Pillars of Society',
artist: 'Grosz',
year: 1926,
tags: 'caricature'
},
{
_id: 2,
title: 'Melancholy III',
artist: 'Munch',
year: 1902,
tags: 'woodcut'
},
{
_id: 2,
title: 'Melancholy III',
artist: 'Munch',
year: 1902,
tags: 'Expressionism'
},
{ _id: 3, title: 'Dancer', artist: 'Miro', year: 1925, tags: 'oil' },
{
_id: 3,
title: 'Dancer',
artist: 'Miro',
year: 1925,
tags: 'Surrealism'
},
{
_id: 3,
title: 'Dancer',
artist: 'Miro',
year: 1925,
tags: 'painting'
},
{
_id: 4,
title: 'The Great Wave off Kanagawa',
artist: 'Hokusai',
tags: 'woodblock'
},
{
_id: 4,
title: 'The Great Wave off Kanagawa',
artist: 'Hokusai',
tags: 'ukiyo-e'
},
{
_id: 5,
title: 'The Persistence of Memory',
artist: 'Dali',
year: 1931,
tags: 'Surrealism'
},
{
_id: 5,
title: 'The Persistence of Memory',
artist: 'Dali',
year: 1931,
tags: 'painting'
},
{
_id: 5,
title: 'The Persistence of Memory',
artist: 'Dali',
year: 1931,
tags: 'oil'
},
{
_id: 6,
title: 'Composition VII',
artist: 'Kandinsky',
year: 1913,
tags: 'oil'
},
{
_id: 6,
title: 'Composition VII',
artist: 'Kandinsky',
year: 1913,
tags: 'painting'
},
{
_id: 6,
title: 'Composition VII',
artist: 'Kandinsky',
year: 1913,
tags: 'abstract'
},
{
_id: 7,
title: 'The Scream',
artist: 'Munch',
year: 1893,
tags: 'Expressionism'
},
{
_id: 7,
title: 'The Scream',
artist: 'Munch',
year: 1893,
tags: 'painting'
},
{
_id: 7,
title: 'The Scream',
artist: 'Munch',
year: 1893,
tags: 'oil'
}
]
The actual query gives a count per tag & sorts the documents
in descending order of count:

db.exhibits.aggregate([
  { $unwind: "$tags" },
  { $sortByCount: "$tags" }
])


Output:
[
{ _id: 'painting', count: 6 },
{ _id: 'oil', count: 4 },
{ _id: 'Expressionism', count: 3 },
{ _id: 'abstract', count: 2 },
{ _id: 'Surrealism', count: 2 },
{ _id: 'ukiyo-e', count: 1 },
{ _id: 'woodcut', count: 1 },
{ _id: 'woodblock', count: 1 },
{ _id: 'caricature', count: 1 },
{ _id: 'satire', count: 1 }
]