/*
 * math64.h provides 64-bit unsigned integer add and multiply followed by
 * a modulo operation.  linux/math64.h provides 64-bit divide and multiply
 * helpers, but:
 * 1. multiply: mul_u64_u64_shr returns only 64 bits of the result and
 *    must be called twice to recover the full 128-bit product.
 * 2. A modulo operation on the result of a u64 addition or multiplication
 *    that may exceed 64 bits is not supported.
 * Hence this header combines add/multiply of u64 integers with a modulo,
 * always producing a u64 result.
 *
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef include_vnet_math64_h
#define include_vnet_math64_h
#include <stdint.h>

/* The u64/u32 fixed-width typedefs used below are assumed to be provided
   by the including environment (e.g. vppinfra's types). */

/*
 * Multiplies a by b and returns the full 128-bit product in *hi and *lo.
 */
static inline void mul64by64(u64 a, u64 b, u64 * hi, u64 * lo)
{
    u64 a_lo = (u64) (u32) a;
    u64 a_hi = a >> 32;
    u64 b_lo = (u64) (u32) b;
    u64 b_hi = b >> 32;

    /* four 32x32->64 partial products */
    u64 p0 = a_lo * b_lo;
    u64 p1 = a_lo * b_hi;
    u64 p2 = a_hi * b_lo;
    u64 p3 = a_hi * b_hi;

    /* carry out of the low 64 bits when the cross terms are folded in */
    u32 cy = (u32) (((p0 >> 32) + (u32) p1 + (u32) p2) >> 32);

    *lo = p0 + (p1 << 32) + (p2 << 32);
    *hi = p3 + (p1 >> 32) + (p2 >> 32) + cy;
}
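
/*
 * Illustrative self-check (an addition to this header, not part of the
 * original API): the schoolbook decomposition above must reproduce e.g.
 * 0xffffffffffffffff * 2 == 0x1fffffffffffffffe, i.e. hi = 1 and
 * lo = 0xfffffffffffffffe.
 */
static inline int mul64by64_selfcheck(void)
{
    u64 hi = 0, lo = 0;
    mul64by64(0xffffffffffffffffULL, 2, &hi, &lo);
    return hi == 1 && lo == 0xfffffffffffffffeULL;
}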

/* 2^64 as a double */
#define TWO64 18446744073709551616.0

/*
 * Returns (x * 2^64 + y) mod m.  di must be the precomputed double
 * reciprocal 1.0 / (double) m.
 */
static inline u64 mod128by64(u64 x, u64 y, u64 m, double di)
{
    u64 q1, q2, q;
    u64 p1, p0;
    double dq;

    /* first-pass quotient estimate; a double carries only 53 bits of
       precision */
    dq = (TWO64 * (double)x + (double)y) * di;

    if (dq >= TWO64)
        q1 = 0xfffffffffffff800UL;      /* largest double below 2^64 */
    else
        q1 = dq;

    /* q1 * m, to compare the product against the dividend */
    mul64by64(q1, m, &p1, &p0);

    /* adjust the quotient: did the estimate overshoot? */
    if (x < p1 || (x == p1 && y < p0))
    {
        /* q1 > true quotient: compute the absolute remainder */
        x = p1 - (x + (p0 < y));
        y = p0 - y;

        /* use the remainder as a new dividend to adjust the quotient */
        q2 = (u64) ((TWO64 * (double)x + (double)y) * di);
        mul64by64(q2, m, &p1, &p0);

        q = q1 - q2;
        if (x < p1 || (x == p1 && y <= p0))
        {
            y = p0 - y;
        }
        else
        {
            y = p0 - y;
            y += m;
            q--;
        }
    }
    else
    {
        x = x - (p1 + (y < p0));
        y = y - p0;

        q2 = (u64) ((TWO64 * (double)x + (double)y) * di);
        mul64by64(q2, m, &p1, &p0);

        q = q1 + q2;
        if (x < p1 || (x == p1 && y < p0))
        {
            y = y - p0;
            y += m;
            q--;
        }
        else
        {
            y = y - p0;
            if (y >= m)
            {
                y -= m;
                q++;
            }
        }
    }

    /* q holds the quotient, but only the remainder is returned */
    return y;
}
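
/*
 * Usage sketch (an illustrative addition; the modulus below is
 * hypothetical): the caller computes di = 1.0 / (double) m once per
 * modulus and reuses it, which is what lets the quotient be estimated
 * without a hardware 128/64 divide.
 */
static inline u64 mod128by64_example(void)
{
    u64 m = 0xffffffffffffffc5ULL;      /* 2^64 - 59, example modulus */
    double di = 1.0 / (double) m;       /* precomputed reciprocal */
    /* (1 * 2^64 + 59) mod (2^64 - 59) == 118 */
    return mod128by64(1, 59, m, di);
}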

/*
 * Returns a % p.  primeinv must be the precomputed double reciprocal
 * 1.0 / (double) p.
 */
static inline u64 mod64by64(u64 a, u64 p, double primeinv)
{
    return (mod128by64(0, a, p, primeinv));
}

/*
 * 128-bit add: *whi:*wlo = a + b, with the carry out in *whi.
 */
static inline void add64(u64 a, u64 b, u64 * whi, u64 * wlo)
{
    *wlo = a + b;
    /* unsigned wrap-around detects the carry; clear *whi when none */
    *whi = (*wlo < a) ? 1 : 0;
}

/*
 * Returns (a + b) % p.  pi must be the precomputed 1.0 / (double) p.
 */
static inline u64 add64_mod(u64 a, u64 b, u64 p, double pi)
{
    u64 shi = 0, slo = 0;

    add64(a, b, &shi, &slo);
    return (mod128by64(shi, slo, p, pi));
}

/*
 * Returns (a * b) % p.  pi must be the precomputed 1.0 / (double) p.
 */
static inline u64 mul64_mod(u64 a, u64 b, u64 p, double pi)
{
    u64 phi = 0, plo = 0;

    mul64by64(a, b, &phi, &plo);
    return (mod128by64(phi, plo, p, pi));
}

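/*
 * End-to-end usage sketch (an illustrative addition, not part of the
 * original header): one precomputed reciprocal serves both the add-mod
 * and mul-mod paths, which is the intended calling pattern.
 */
static inline u64 math64_usage_example(u64 a, u64 b)
{
    u64 p = 0xffffffffffffffc5ULL;  /* hypothetical prime modulus */
    double pi = 1.0 / (double) p;   /* compute once per modulus, reuse */
    u64 s = add64_mod(a, b, p, pi); /* (a + b) mod p */
    return mul64_mod(s, b, p, pi);  /* ((a + b) * b) mod p */
}
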
#endif /* include_vnet_math64_h */